From 9325591c4df9c651e05d02e7b24943f50b272124 Mon Sep 17 00:00:00 2001
From: Owl Bot
Date: Tue, 26 Sep 2023 21:52:23 +0000
Subject: [PATCH 1/3] feat:Enable Vertex AI Ingestion on DataPlex

PiperOrigin-RevId: 568623212
Source-Link: https://github.com/googleapis/googleapis/commit/f878578ebea3b9735741aa2e3c8aeb01010b0ad4
Source-Link: https://github.com/googleapis/googleapis-gen/commit/149c56403e049b3b1d1a241f1b9fb68200689c0b
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFjYXRhbG9nLy5Pd2xCb3QueWFtbCIsImgiOiIxNDljNTY0MDNlMDQ5YjNiMWQxYTI0MWYxYjlmYjY4MjAwNjg5YzBiIn0=
---
 .../google-cloud-datacatalog/v1/.coveragerc | 13 + .../google-cloud-datacatalog/v1/.flake8 | 33 + .../google-cloud-datacatalog/v1/MANIFEST.in | 2 + .../google-cloud-datacatalog/v1/README.rst | 49 + .../v1/docs/_static/custom.css | 3 + .../google-cloud-datacatalog/v1/docs/conf.py | 376 + .../v1/docs/datacatalog_v1/data_catalog.rst | 10 + .../datacatalog_v1/policy_tag_manager.rst | 10 + .../policy_tag_manager_serialization.rst | 6 + .../v1/docs/datacatalog_v1/services.rst | 8 + .../v1/docs/datacatalog_v1/types.rst | 6 + .../v1/docs/index.rst | 7 + .../v1/google/cloud/datacatalog/__init__.py | 273 + .../google/cloud/datacatalog/gapic_version.py | 16 + .../v1/google/cloud/datacatalog/py.typed | 2 + .../google/cloud/datacatalog_v1/__init__.py | 274 + .../cloud/datacatalog_v1/gapic_metadata.json | 551 + .../cloud/datacatalog_v1/gapic_version.py | 16 + .../v1/google/cloud/datacatalog_v1/py.typed | 2 + .../cloud/datacatalog_v1/services/__init__.py | 15 + .../services/data_catalog/__init__.py | 22 + .../services/data_catalog/async_client.py | 4510 +++++++ .../services/data_catalog/client.py | 4761 +++++++ .../services/data_catalog/pagers.py | 504 + .../data_catalog/transports/__init__.py | 33 + .../services/data_catalog/transports/base.py | 657 + .../services/data_catalog/transports/grpc.py | 1469 +++ .../data_catalog/transports/grpc_asyncio.py | 1468 +++ .../services/policy_tag_manager/__init__.py | 22 + .../policy_tag_manager/async_client.py | 1819 +++ .../services/policy_tag_manager/client.py | 2029 +++ .../services/policy_tag_manager/pagers.py | 260 + .../policy_tag_manager/transports/__init__.py | 33 + .../policy_tag_manager/transports/base.py | 356 + .../policy_tag_manager/transports/grpc.py | 671 + .../transports/grpc_asyncio.py | 670 + .../__init__.py | 22 + .../async_client.py | 699 ++ .../client.py | 906 ++ .../transports/__init__.py | 33 + .../transports/base.py | 216 + .../transports/grpc.py | 422 + .../transports/grpc_asyncio.py | 421 + .../cloud/datacatalog_v1/types/__init__.py | 288 + .../cloud/datacatalog_v1/types/bigquery.py | 136 + .../cloud/datacatalog_v1/types/common.py | 112 + .../cloud/datacatalog_v1/types/data_source.py | 132 + .../cloud/datacatalog_v1/types/datacatalog.py | 2727 ++++ .../datacatalog_v1/types/dataplex_spec.py | 170 + .../datacatalog_v1/types/dump_content.py | 95 + .../datacatalog_v1/types/gcs_fileset_spec.py | 119 + .../datacatalog_v1/types/physical_schema.py | 158 + .../datacatalog_v1/types/policytagmanager.py | 551 + .../types/policytagmanagerserialization.py | 288 + .../cloud/datacatalog_v1/types/schema.py | 204 + .../cloud/datacatalog_v1/types/search.py | 183 + .../cloud/datacatalog_v1/types/table_spec.py | 178 + .../google/cloud/datacatalog_v1/types/tags.py | 466 + .../cloud/datacatalog_v1/types/timestamps.py | 72 + .../cloud/datacatalog_v1/types/usage.py | 156 + .../google-cloud-datacatalog/v1/mypy.ini | 3 + .../google-cloud-datacatalog/v1/noxfile.py | 184 + 
...nerated_data_catalog_create_entry_async.py | 59 + ...d_data_catalog_create_entry_group_async.py | 53 + ...ed_data_catalog_create_entry_group_sync.py | 53 + ...enerated_data_catalog_create_entry_sync.py | 59 + ...generated_data_catalog_create_tag_async.py | 57 + ..._generated_data_catalog_create_tag_sync.py | 57 + ..._data_catalog_create_tag_template_async.py | 53 + ...catalog_create_tag_template_field_async.py | 57 + ..._catalog_create_tag_template_field_sync.py | 57 + ...d_data_catalog_create_tag_template_sync.py | 53 + ...nerated_data_catalog_delete_entry_async.py | 50 + ...d_data_catalog_delete_entry_group_async.py | 50 + ...ed_data_catalog_delete_entry_group_sync.py | 50 + ...enerated_data_catalog_delete_entry_sync.py | 50 + ...generated_data_catalog_delete_tag_async.py | 50 + ..._generated_data_catalog_delete_tag_sync.py | 50 + ..._data_catalog_delete_tag_template_async.py | 51 + ...catalog_delete_tag_template_field_async.py | 51 + ..._catalog_delete_tag_template_field_sync.py | 51 + ...d_data_catalog_delete_tag_template_sync.py | 51 + ..._generated_data_catalog_get_entry_async.py | 52 + ...ated_data_catalog_get_entry_group_async.py | 52 + ...rated_data_catalog_get_entry_group_sync.py | 52 + ...1_generated_data_catalog_get_entry_sync.py | 52 + ...rated_data_catalog_get_iam_policy_async.py | 53 + ...erated_data_catalog_get_iam_policy_sync.py | 53 + ...ted_data_catalog_get_tag_template_async.py | 52 + ...ated_data_catalog_get_tag_template_sync.py | 52 + ...rated_data_catalog_import_entries_async.py | 57 + ...erated_data_catalog_import_entries_sync.py | 57 + ...nerated_data_catalog_list_entries_async.py | 53 + ...enerated_data_catalog_list_entries_sync.py | 53 + ...ed_data_catalog_list_entry_groups_async.py | 53 + ...ted_data_catalog_list_entry_groups_sync.py | 53 + ..._generated_data_catalog_list_tags_async.py | 53 + ...1_generated_data_catalog_list_tags_sync.py | 53 + ...nerated_data_catalog_lookup_entry_async.py | 52 + ...enerated_data_catalog_lookup_entry_sync.py | 52 + ...ata_catalog_modify_entry_contacts_async.py | 52 + ...data_catalog_modify_entry_contacts_sync.py | 52 + ...ata_catalog_modify_entry_overview_async.py | 52 + ...data_catalog_modify_entry_overview_sync.py | 52 + ...rated_data_catalog_reconcile_tags_async.py | 57 + ...erated_data_catalog_reconcile_tags_sync.py | 57 + ...catalog_rename_tag_template_field_async.py | 53 + ...ame_tag_template_field_enum_value_async.py | 53 + ...name_tag_template_field_enum_value_sync.py | 53 + ..._catalog_rename_tag_template_field_sync.py | 53 + ...rated_data_catalog_search_catalog_async.py | 52 + ...erated_data_catalog_search_catalog_sync.py | 52 + ...rated_data_catalog_set_iam_policy_async.py | 53 + ...erated_data_catalog_set_iam_policy_sync.py | 53 + ...generated_data_catalog_star_entry_async.py | 52 + ..._generated_data_catalog_star_entry_sync.py | 52 + ...data_catalog_test_iam_permissions_async.py | 54 + ..._data_catalog_test_iam_permissions_sync.py | 54 + ...nerated_data_catalog_unstar_entry_async.py | 52 + ...enerated_data_catalog_unstar_entry_sync.py | 52 + ...nerated_data_catalog_update_entry_async.py | 57 + ...d_data_catalog_update_entry_group_async.py | 51 + ...ed_data_catalog_update_entry_group_sync.py | 51 + ...enerated_data_catalog_update_entry_sync.py | 57 + ...generated_data_catalog_update_tag_async.py | 56 + ..._generated_data_catalog_update_tag_sync.py | 56 + ..._data_catalog_update_tag_template_async.py | 51 + ...catalog_update_tag_template_field_async.py | 56 + ..._catalog_update_tag_template_field_sync.py | 56 + 
...d_data_catalog_update_tag_template_sync.py | 51 + ...icy_tag_manager_create_policy_tag_async.py | 52 + ...licy_tag_manager_create_policy_tag_sync.py | 52 + ...olicy_tag_manager_create_taxonomy_async.py | 52 + ...policy_tag_manager_create_taxonomy_sync.py | 52 + ...icy_tag_manager_delete_policy_tag_async.py | 50 + ...licy_tag_manager_delete_policy_tag_sync.py | 50 + ...olicy_tag_manager_delete_taxonomy_async.py | 50 + ...policy_tag_manager_delete_taxonomy_sync.py | 50 + ...policy_tag_manager_get_iam_policy_async.py | 53 + ..._policy_tag_manager_get_iam_policy_sync.py | 53 + ...policy_tag_manager_get_policy_tag_async.py | 52 + ..._policy_tag_manager_get_policy_tag_sync.py | 52 + ...d_policy_tag_manager_get_taxonomy_async.py | 52 + ...ed_policy_tag_manager_get_taxonomy_sync.py | 52 + ...licy_tag_manager_list_policy_tags_async.py | 53 + ...olicy_tag_manager_list_policy_tags_sync.py | 53 + ...olicy_tag_manager_list_taxonomies_async.py | 53 + ...policy_tag_manager_list_taxonomies_sync.py | 53 + ...r_serialization_export_taxonomies_async.py | 54 + ...er_serialization_export_taxonomies_sync.py | 54 + ...r_serialization_import_taxonomies_async.py | 56 + ...er_serialization_import_taxonomies_sync.py | 56 + ...er_serialization_replace_taxonomy_async.py | 56 + ...ger_serialization_replace_taxonomy_sync.py | 56 + ...policy_tag_manager_set_iam_policy_async.py | 53 + ..._policy_tag_manager_set_iam_policy_sync.py | 53 + ..._tag_manager_test_iam_permissions_async.py | 54 + ...y_tag_manager_test_iam_permissions_sync.py | 54 + ...icy_tag_manager_update_policy_tag_async.py | 51 + ...licy_tag_manager_update_policy_tag_sync.py | 51 + ...olicy_tag_manager_update_taxonomy_async.py | 51 + ...policy_tag_manager_update_taxonomy_sync.py | 51 + ..._metadata_google.cloud.datacatalog.v1.json | 8111 ++++++++++++ .../scripts/fixup_datacatalog_v1_keywords.py | 222 + .../google-cloud-datacatalog/v1/setup.py | 91 + .../v1/testing/constraints-3.10.txt | 7 + .../v1/testing/constraints-3.11.txt | 7 + .../v1/testing/constraints-3.12.txt | 7 + .../v1/testing/constraints-3.7.txt | 10 + .../v1/testing/constraints-3.8.txt | 7 + .../v1/testing/constraints-3.9.txt | 7 + .../v1/tests/__init__.py | 16 + .../v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/datacatalog_v1/__init__.py | 16 + .../gapic/datacatalog_v1/test_data_catalog.py | 10354 ++++++++++++++++ .../datacatalog_v1/test_policy_tag_manager.py | 5041 ++++++++ .../test_policy_tag_manager_serialization.py | 2144 ++++ .../v1beta1/.coveragerc | 13 + .../google-cloud-datacatalog/v1beta1/.flake8 | 33 + .../v1beta1/MANIFEST.in | 2 + .../v1beta1/README.rst | 49 + .../v1beta1/docs/_static/custom.css | 3 + .../v1beta1/docs/conf.py | 376 + .../docs/datacatalog_v1beta1/data_catalog.rst | 10 + .../policy_tag_manager.rst | 10 + .../policy_tag_manager_serialization.rst | 6 + .../docs/datacatalog_v1beta1/services.rst | 8 + .../docs/datacatalog_v1beta1/types.rst | 6 + .../v1beta1/docs/index.rst | 7 + .../google/cloud/datacatalog/__init__.py | 183 + .../google/cloud/datacatalog/gapic_version.py | 16 + .../v1beta1/google/cloud/datacatalog/py.typed | 2 + .../cloud/datacatalog_v1beta1/__init__.py | 184 + .../datacatalog_v1beta1/gapic_metadata.json | 481 + .../datacatalog_v1beta1/gapic_version.py | 16 + .../google/cloud/datacatalog_v1beta1/py.typed | 2 + .../datacatalog_v1beta1/services/__init__.py | 15 + .../services/data_catalog/__init__.py | 22 + .../services/data_catalog/async_client.py | 3653 ++++++ .../services/data_catalog/client.py | 3904 ++++++ 
.../services/data_catalog/pagers.py | 504 + .../data_catalog/transports/__init__.py | 33 + .../services/data_catalog/transports/base.py | 531 + .../services/data_catalog/transports/grpc.py | 1122 ++ .../data_catalog/transports/grpc_asyncio.py | 1121 ++ .../services/policy_tag_manager/__init__.py | 22 + .../policy_tag_manager/async_client.py | 1582 +++ .../services/policy_tag_manager/client.py | 1796 +++ .../services/policy_tag_manager/pagers.py | 260 + .../policy_tag_manager/transports/__init__.py | 33 + .../policy_tag_manager/transports/base.py | 320 + .../policy_tag_manager/transports/grpc.py | 586 + .../transports/grpc_asyncio.py | 585 + .../__init__.py | 22 + .../async_client.py | 380 + .../client.py | 590 + .../transports/__init__.py | 33 + .../transports/base.py | 165 + .../transports/grpc.py | 303 + .../transports/grpc_asyncio.py | 302 + .../datacatalog_v1beta1/types/__init__.py | 184 + .../cloud/datacatalog_v1beta1/types/common.py | 66 + .../datacatalog_v1beta1/types/datacatalog.py | 1363 ++ .../types/gcs_fileset_spec.py | 117 + .../types/policytagmanager.py | 520 + .../types/policytagmanagerserialization.py | 234 + .../cloud/datacatalog_v1beta1/types/schema.py | 93 + .../cloud/datacatalog_v1beta1/types/search.py | 114 + .../datacatalog_v1beta1/types/table_spec.py | 165 + .../cloud/datacatalog_v1beta1/types/tags.py | 407 + .../datacatalog_v1beta1/types/timestamps.py | 67 + .../cloud/datacatalog_v1beta1/types/usage.py | 104 + .../google-cloud-datacatalog/v1beta1/mypy.ini | 3 + .../v1beta1/noxfile.py | 184 + ...nerated_data_catalog_create_entry_async.py | 59 + ...d_data_catalog_create_entry_group_async.py | 53 + ...ed_data_catalog_create_entry_group_sync.py | 53 + ...enerated_data_catalog_create_entry_sync.py | 59 + ...generated_data_catalog_create_tag_async.py | 57 + ..._generated_data_catalog_create_tag_sync.py | 57 + ..._data_catalog_create_tag_template_async.py | 53 + ...catalog_create_tag_template_field_async.py | 57 + ..._catalog_create_tag_template_field_sync.py | 57 + ...d_data_catalog_create_tag_template_sync.py | 53 + ...nerated_data_catalog_delete_entry_async.py | 50 + ...d_data_catalog_delete_entry_group_async.py | 50 + ...ed_data_catalog_delete_entry_group_sync.py | 50 + ...enerated_data_catalog_delete_entry_sync.py | 50 + ...generated_data_catalog_delete_tag_async.py | 50 + ..._generated_data_catalog_delete_tag_sync.py | 50 + ..._data_catalog_delete_tag_template_async.py | 51 + ...catalog_delete_tag_template_field_async.py | 51 + ..._catalog_delete_tag_template_field_sync.py | 51 + ...d_data_catalog_delete_tag_template_sync.py | 51 + ..._generated_data_catalog_get_entry_async.py | 52 + ...ated_data_catalog_get_entry_group_async.py | 52 + ...rated_data_catalog_get_entry_group_sync.py | 52 + ...1_generated_data_catalog_get_entry_sync.py | 52 + ...rated_data_catalog_get_iam_policy_async.py | 53 + ...erated_data_catalog_get_iam_policy_sync.py | 53 + ...ted_data_catalog_get_tag_template_async.py | 52 + ...ated_data_catalog_get_tag_template_sync.py | 52 + ...nerated_data_catalog_list_entries_async.py | 53 + ...enerated_data_catalog_list_entries_sync.py | 53 + ...ed_data_catalog_list_entry_groups_async.py | 53 + ...ted_data_catalog_list_entry_groups_sync.py | 53 + ..._generated_data_catalog_list_tags_async.py | 53 + ...1_generated_data_catalog_list_tags_sync.py | 53 + ...nerated_data_catalog_lookup_entry_async.py | 52 + ...enerated_data_catalog_lookup_entry_sync.py | 52 + ...catalog_rename_tag_template_field_async.py | 53 + ...ame_tag_template_field_enum_value_async.py | 53 + 
...name_tag_template_field_enum_value_sync.py | 53 + ..._catalog_rename_tag_template_field_sync.py | 53 + ...rated_data_catalog_search_catalog_async.py | 52 + ...erated_data_catalog_search_catalog_sync.py | 52 + ...rated_data_catalog_set_iam_policy_async.py | 53 + ...erated_data_catalog_set_iam_policy_sync.py | 53 + ...data_catalog_test_iam_permissions_async.py | 54 + ..._data_catalog_test_iam_permissions_sync.py | 54 + ...nerated_data_catalog_update_entry_async.py | 57 + ...d_data_catalog_update_entry_group_async.py | 51 + ...ed_data_catalog_update_entry_group_sync.py | 51 + ...enerated_data_catalog_update_entry_sync.py | 57 + ...generated_data_catalog_update_tag_async.py | 56 + ..._generated_data_catalog_update_tag_sync.py | 56 + ..._data_catalog_update_tag_template_async.py | 51 + ...catalog_update_tag_template_field_async.py | 56 + ..._catalog_update_tag_template_field_sync.py | 56 + ...d_data_catalog_update_tag_template_sync.py | 51 + ...icy_tag_manager_create_policy_tag_async.py | 52 + ...licy_tag_manager_create_policy_tag_sync.py | 52 + ...olicy_tag_manager_create_taxonomy_async.py | 52 + ...policy_tag_manager_create_taxonomy_sync.py | 52 + ...icy_tag_manager_delete_policy_tag_async.py | 50 + ...licy_tag_manager_delete_policy_tag_sync.py | 50 + ...olicy_tag_manager_delete_taxonomy_async.py | 50 + ...policy_tag_manager_delete_taxonomy_sync.py | 50 + ...policy_tag_manager_get_iam_policy_async.py | 53 + ..._policy_tag_manager_get_iam_policy_sync.py | 53 + ...policy_tag_manager_get_policy_tag_async.py | 52 + ..._policy_tag_manager_get_policy_tag_sync.py | 52 + ...d_policy_tag_manager_get_taxonomy_async.py | 52 + ...ed_policy_tag_manager_get_taxonomy_sync.py | 52 + ...licy_tag_manager_list_policy_tags_async.py | 53 + ...olicy_tag_manager_list_policy_tags_sync.py | 53 + ...olicy_tag_manager_list_taxonomies_async.py | 53 + ...policy_tag_manager_list_taxonomies_sync.py | 53 + ...r_serialization_export_taxonomies_async.py | 54 + ...er_serialization_export_taxonomies_sync.py | 54 + ...r_serialization_import_taxonomies_async.py | 56 + ...er_serialization_import_taxonomies_sync.py | 56 + ...policy_tag_manager_set_iam_policy_async.py | 53 + ..._policy_tag_manager_set_iam_policy_sync.py | 53 + ..._tag_manager_test_iam_permissions_async.py | 54 + ...y_tag_manager_test_iam_permissions_sync.py | 54 + ...icy_tag_manager_update_policy_tag_async.py | 51 + ...licy_tag_manager_update_policy_tag_sync.py | 51 + ...olicy_tag_manager_update_taxonomy_async.py | 51 + ...policy_tag_manager_update_taxonomy_sync.py | 51 + ...data_google.cloud.datacatalog.v1beta1.json | 7024 +++++++++++ .../fixup_datacatalog_v1beta1_keywords.py | 215 + .../google-cloud-datacatalog/v1beta1/setup.py | 91 + .../v1beta1/testing/constraints-3.10.txt | 7 + .../v1beta1/testing/constraints-3.11.txt | 7 + .../v1beta1/testing/constraints-3.12.txt | 7 + .../v1beta1/testing/constraints-3.7.txt | 10 + .../v1beta1/testing/constraints-3.8.txt | 7 + .../v1beta1/testing/constraints-3.9.txt | 7 + .../v1beta1/tests/__init__.py | 16 + .../v1beta1/tests/unit/__init__.py | 16 + .../v1beta1/tests/unit/gapic/__init__.py | 16 + .../gapic/datacatalog_v1beta1/__init__.py | 16 + .../datacatalog_v1beta1/test_data_catalog.py | 8709 +++++++++++++ .../test_policy_tag_manager.py | 4521 +++++++ .../test_policy_tag_manager_serialization.py | 1456 +++ 337 files changed, 110854 insertions(+) create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/.flake8 create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/README.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/setup.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py create mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py 
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json
 create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py
 create mode 100644
owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py create mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc b/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc new file mode 100644 index 000000000000..8d9d83e17533 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/datacatalog/__init__.py + google/cloud/datacatalog/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 b/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in b/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in new file mode 100644 index 000000000000..7344043d20a5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/datacatalog *.py +recursive-include google/cloud/datacatalog_v1 *.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/README.rst b/owl-bot-staging/google-cloud-datacatalog/v1/README.rst new file mode 100644 index 000000000000..8f53b24416fc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Datacatalog API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Datacatalog API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py b/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py new file mode 100644 index 000000000000..aec9c23130e4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-datacatalog documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. 
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-datacatalog" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). 
+# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-datacatalog-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-datacatalog.tex", + u"google-cloud-datacatalog Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-datacatalog", + u"Google Cloud Datacatalog Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-datacatalog", + u"google-cloud-datacatalog Documentation", + author, + "google-cloud-datacatalog", + "GAPIC library for Google Cloud Datacatalog API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst new file mode 100644 index 000000000000..6141a1c7bc43 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst @@ -0,0 +1,10 @@ +DataCatalog +----------------------------- + +.. automodule:: google.cloud.datacatalog_v1.services.data_catalog + :members: + :inherited-members: + +.. automodule:: google.cloud.datacatalog_v1.services.data_catalog.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst new file mode 100644 index 000000000000..03d2846ff2e8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst @@ -0,0 +1,10 @@ +PolicyTagManager +---------------------------------- + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst new file mode 100644 index 000000000000..f7006d1b0825 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst @@ -0,0 +1,6 @@ +PolicyTagManagerSerialization +----------------------------------------------- + +.. 
automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager_serialization + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst new file mode 100644 index 000000000000..a70d3132fd05 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Datacatalog v1 API +============================================ +.. toctree:: + :maxdepth: 2 + + data_catalog + policy_tag_manager + policy_tag_manager_serialization diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst new file mode 100644 index 000000000000..19f12ef87fd9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Datacatalog v1 API +========================================= + +.. automodule:: google.cloud.datacatalog_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst new file mode 100644 index 000000000000..7af5288574f0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + datacatalog_v1/services + datacatalog_v1/types diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py new file mode 100644 index 000000000000..e667ef3fd25e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.datacatalog import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.datacatalog_v1.services.data_catalog.client import DataCatalogClient +from google.cloud.datacatalog_v1.services.data_catalog.async_client import DataCatalogAsyncClient +from google.cloud.datacatalog_v1.services.policy_tag_manager.client import PolicyTagManagerClient +from google.cloud.datacatalog_v1.services.policy_tag_manager.async_client import PolicyTagManagerAsyncClient +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.client import PolicyTagManagerSerializationClient +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.async_client import PolicyTagManagerSerializationAsyncClient + +from google.cloud.datacatalog_v1.types.bigquery import BigQueryConnectionSpec +from google.cloud.datacatalog_v1.types.bigquery import BigQueryRoutineSpec +from google.cloud.datacatalog_v1.types.bigquery import CloudSqlBigQueryConnectionSpec +from google.cloud.datacatalog_v1.types.common import PersonalDetails +from google.cloud.datacatalog_v1.types.common import IntegratedSystem +from google.cloud.datacatalog_v1.types.common import ManagingSystem +from google.cloud.datacatalog_v1.types.data_source import DataSource +from google.cloud.datacatalog_v1.types.data_source import StorageProperties +from google.cloud.datacatalog_v1.types.datacatalog import BusinessContext +from google.cloud.datacatalog_v1.types.datacatalog import CloudBigtableInstanceSpec +from google.cloud.datacatalog_v1.types.datacatalog import CloudBigtableSystemSpec +from google.cloud.datacatalog_v1.types.datacatalog import Contacts +from google.cloud.datacatalog_v1.types.datacatalog import CreateEntryGroupRequest +from google.cloud.datacatalog_v1.types.datacatalog import CreateEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import CreateTagRequest +from google.cloud.datacatalog_v1.types.datacatalog import CreateTagTemplateFieldRequest +from google.cloud.datacatalog_v1.types.datacatalog import CreateTagTemplateRequest +from google.cloud.datacatalog_v1.types.datacatalog import DatabaseTableSpec +from google.cloud.datacatalog_v1.types.datacatalog import DatasetSpec +from google.cloud.datacatalog_v1.types.datacatalog import DataSourceConnectionSpec +from google.cloud.datacatalog_v1.types.datacatalog import DeleteEntryGroupRequest +from google.cloud.datacatalog_v1.types.datacatalog import DeleteEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagRequest +from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagTemplateFieldRequest +from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagTemplateRequest +from google.cloud.datacatalog_v1.types.datacatalog import Entry +from google.cloud.datacatalog_v1.types.datacatalog import EntryGroup +from google.cloud.datacatalog_v1.types.datacatalog import EntryOverview +from google.cloud.datacatalog_v1.types.datacatalog import FilesetSpec +from google.cloud.datacatalog_v1.types.datacatalog import GetEntryGroupRequest +from google.cloud.datacatalog_v1.types.datacatalog import GetEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import GetTagTemplateRequest +from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesMetadata +from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesRequest +from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesResponse +from google.cloud.datacatalog_v1.types.datacatalog 
import ListEntriesRequest +from google.cloud.datacatalog_v1.types.datacatalog import ListEntriesResponse +from google.cloud.datacatalog_v1.types.datacatalog import ListEntryGroupsRequest +from google.cloud.datacatalog_v1.types.datacatalog import ListEntryGroupsResponse +from google.cloud.datacatalog_v1.types.datacatalog import ListTagsRequest +from google.cloud.datacatalog_v1.types.datacatalog import ListTagsResponse +from google.cloud.datacatalog_v1.types.datacatalog import LookerSystemSpec +from google.cloud.datacatalog_v1.types.datacatalog import LookupEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import ModelSpec +from google.cloud.datacatalog_v1.types.datacatalog import ModifyEntryContactsRequest +from google.cloud.datacatalog_v1.types.datacatalog import ModifyEntryOverviewRequest +from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsMetadata +from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsRequest +from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsResponse +from google.cloud.datacatalog_v1.types.datacatalog import RenameTagTemplateFieldEnumValueRequest +from google.cloud.datacatalog_v1.types.datacatalog import RenameTagTemplateFieldRequest +from google.cloud.datacatalog_v1.types.datacatalog import RoutineSpec +from google.cloud.datacatalog_v1.types.datacatalog import SearchCatalogRequest +from google.cloud.datacatalog_v1.types.datacatalog import SearchCatalogResponse +from google.cloud.datacatalog_v1.types.datacatalog import ServiceSpec +from google.cloud.datacatalog_v1.types.datacatalog import SqlDatabaseSystemSpec +from google.cloud.datacatalog_v1.types.datacatalog import StarEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import StarEntryResponse +from google.cloud.datacatalog_v1.types.datacatalog import UnstarEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import UnstarEntryResponse +from google.cloud.datacatalog_v1.types.datacatalog import UpdateEntryGroupRequest +from google.cloud.datacatalog_v1.types.datacatalog import UpdateEntryRequest +from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagRequest +from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagTemplateFieldRequest +from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagTemplateRequest +from google.cloud.datacatalog_v1.types.datacatalog import VertexDatasetSpec +from google.cloud.datacatalog_v1.types.datacatalog import VertexModelSourceInfo +from google.cloud.datacatalog_v1.types.datacatalog import VertexModelSpec +from google.cloud.datacatalog_v1.types.datacatalog import EntryType +from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexExternalTable +from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexFilesetSpec +from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexSpec +from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexTableSpec +from google.cloud.datacatalog_v1.types.dump_content import DumpItem +from google.cloud.datacatalog_v1.types.dump_content import TaggedEntry +from google.cloud.datacatalog_v1.types.gcs_fileset_spec import GcsFilesetSpec +from google.cloud.datacatalog_v1.types.gcs_fileset_spec import GcsFileSpec +from google.cloud.datacatalog_v1.types.physical_schema import PhysicalSchema +from google.cloud.datacatalog_v1.types.policytagmanager import CreatePolicyTagRequest +from google.cloud.datacatalog_v1.types.policytagmanager import CreateTaxonomyRequest +from 
google.cloud.datacatalog_v1.types.policytagmanager import DeletePolicyTagRequest +from google.cloud.datacatalog_v1.types.policytagmanager import DeleteTaxonomyRequest +from google.cloud.datacatalog_v1.types.policytagmanager import GetPolicyTagRequest +from google.cloud.datacatalog_v1.types.policytagmanager import GetTaxonomyRequest +from google.cloud.datacatalog_v1.types.policytagmanager import ListPolicyTagsRequest +from google.cloud.datacatalog_v1.types.policytagmanager import ListPolicyTagsResponse +from google.cloud.datacatalog_v1.types.policytagmanager import ListTaxonomiesRequest +from google.cloud.datacatalog_v1.types.policytagmanager import ListTaxonomiesResponse +from google.cloud.datacatalog_v1.types.policytagmanager import PolicyTag +from google.cloud.datacatalog_v1.types.policytagmanager import Taxonomy +from google.cloud.datacatalog_v1.types.policytagmanager import UpdatePolicyTagRequest +from google.cloud.datacatalog_v1.types.policytagmanager import UpdateTaxonomyRequest +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import CrossRegionalSource +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ExportTaxonomiesRequest +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ExportTaxonomiesResponse +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ImportTaxonomiesRequest +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ImportTaxonomiesResponse +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import InlineSource +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ReplaceTaxonomyRequest +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import SerializedPolicyTag +from google.cloud.datacatalog_v1.types.policytagmanagerserialization import SerializedTaxonomy +from google.cloud.datacatalog_v1.types.schema import ColumnSchema +from google.cloud.datacatalog_v1.types.schema import Schema +from google.cloud.datacatalog_v1.types.search import SearchCatalogResult +from google.cloud.datacatalog_v1.types.search import SearchResultType +from google.cloud.datacatalog_v1.types.table_spec import BigQueryDateShardedSpec +from google.cloud.datacatalog_v1.types.table_spec import BigQueryTableSpec +from google.cloud.datacatalog_v1.types.table_spec import TableSpec +from google.cloud.datacatalog_v1.types.table_spec import ViewSpec +from google.cloud.datacatalog_v1.types.table_spec import TableSourceType +from google.cloud.datacatalog_v1.types.tags import FieldType +from google.cloud.datacatalog_v1.types.tags import Tag +from google.cloud.datacatalog_v1.types.tags import TagField +from google.cloud.datacatalog_v1.types.tags import TagTemplate +from google.cloud.datacatalog_v1.types.tags import TagTemplateField +from google.cloud.datacatalog_v1.types.timestamps import SystemTimestamps +from google.cloud.datacatalog_v1.types.usage import CommonUsageStats +from google.cloud.datacatalog_v1.types.usage import UsageSignal +from google.cloud.datacatalog_v1.types.usage import UsageStats + +__all__ = ('DataCatalogClient', + 'DataCatalogAsyncClient', + 'PolicyTagManagerClient', + 'PolicyTagManagerAsyncClient', + 'PolicyTagManagerSerializationClient', + 'PolicyTagManagerSerializationAsyncClient', + 'BigQueryConnectionSpec', + 'BigQueryRoutineSpec', + 'CloudSqlBigQueryConnectionSpec', + 'PersonalDetails', + 'IntegratedSystem', + 'ManagingSystem', + 'DataSource', + 'StorageProperties', + 'BusinessContext', + 
'CloudBigtableInstanceSpec', + 'CloudBigtableSystemSpec', + 'Contacts', + 'CreateEntryGroupRequest', + 'CreateEntryRequest', + 'CreateTagRequest', + 'CreateTagTemplateFieldRequest', + 'CreateTagTemplateRequest', + 'DatabaseTableSpec', + 'DatasetSpec', + 'DataSourceConnectionSpec', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteTagRequest', + 'DeleteTagTemplateFieldRequest', + 'DeleteTagTemplateRequest', + 'Entry', + 'EntryGroup', + 'EntryOverview', + 'FilesetSpec', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetTagTemplateRequest', + 'ImportEntriesMetadata', + 'ImportEntriesRequest', + 'ImportEntriesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListTagsRequest', + 'ListTagsResponse', + 'LookerSystemSpec', + 'LookupEntryRequest', + 'ModelSpec', + 'ModifyEntryContactsRequest', + 'ModifyEntryOverviewRequest', + 'ReconcileTagsMetadata', + 'ReconcileTagsRequest', + 'ReconcileTagsResponse', + 'RenameTagTemplateFieldEnumValueRequest', + 'RenameTagTemplateFieldRequest', + 'RoutineSpec', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'ServiceSpec', + 'SqlDatabaseSystemSpec', + 'StarEntryRequest', + 'StarEntryResponse', + 'UnstarEntryRequest', + 'UnstarEntryResponse', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateTagRequest', + 'UpdateTagTemplateFieldRequest', + 'UpdateTagTemplateRequest', + 'VertexDatasetSpec', + 'VertexModelSourceInfo', + 'VertexModelSpec', + 'EntryType', + 'DataplexExternalTable', + 'DataplexFilesetSpec', + 'DataplexSpec', + 'DataplexTableSpec', + 'DumpItem', + 'TaggedEntry', + 'GcsFilesetSpec', + 'GcsFileSpec', + 'PhysicalSchema', + 'CreatePolicyTagRequest', + 'CreateTaxonomyRequest', + 'DeletePolicyTagRequest', + 'DeleteTaxonomyRequest', + 'GetPolicyTagRequest', + 'GetTaxonomyRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'PolicyTag', + 'Taxonomy', + 'UpdatePolicyTagRequest', + 'UpdateTaxonomyRequest', + 'CrossRegionalSource', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + 'ImportTaxonomiesRequest', + 'ImportTaxonomiesResponse', + 'InlineSource', + 'ReplaceTaxonomyRequest', + 'SerializedPolicyTag', + 'SerializedTaxonomy', + 'ColumnSchema', + 'Schema', + 'SearchCatalogResult', + 'SearchResultType', + 'BigQueryDateShardedSpec', + 'BigQueryTableSpec', + 'TableSpec', + 'ViewSpec', + 'TableSourceType', + 'FieldType', + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'SystemTimestamps', + 'CommonUsageStats', + 'UsageSignal', + 'UsageStats', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py new file mode 100644 index 000000000000..360a0d13ebdd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed new file mode 100644 index 000000000000..bb4088a3c198 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datacatalog package uses inline types. diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py new file mode 100644 index 000000000000..a8482519bf0d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py @@ -0,0 +1,274 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.datacatalog_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_catalog import DataCatalogClient +from .services.data_catalog import DataCatalogAsyncClient +from .services.policy_tag_manager import PolicyTagManagerClient +from .services.policy_tag_manager import PolicyTagManagerAsyncClient +from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient +from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient + +from .types.bigquery import BigQueryConnectionSpec +from .types.bigquery import BigQueryRoutineSpec +from .types.bigquery import CloudSqlBigQueryConnectionSpec +from .types.common import PersonalDetails +from .types.common import IntegratedSystem +from .types.common import ManagingSystem +from .types.data_source import DataSource +from .types.data_source import StorageProperties +from .types.datacatalog import BusinessContext +from .types.datacatalog import CloudBigtableInstanceSpec +from .types.datacatalog import CloudBigtableSystemSpec +from .types.datacatalog import Contacts +from .types.datacatalog import CreateEntryGroupRequest +from .types.datacatalog import CreateEntryRequest +from .types.datacatalog import CreateTagRequest +from .types.datacatalog import CreateTagTemplateFieldRequest +from .types.datacatalog import CreateTagTemplateRequest +from .types.datacatalog import DatabaseTableSpec +from .types.datacatalog import DatasetSpec +from .types.datacatalog import DataSourceConnectionSpec +from .types.datacatalog import DeleteEntryGroupRequest +from .types.datacatalog import DeleteEntryRequest +from .types.datacatalog import DeleteTagRequest +from .types.datacatalog import DeleteTagTemplateFieldRequest +from .types.datacatalog import DeleteTagTemplateRequest +from .types.datacatalog import Entry +from .types.datacatalog import EntryGroup +from .types.datacatalog import EntryOverview +from 
.types.datacatalog import FilesetSpec +from .types.datacatalog import GetEntryGroupRequest +from .types.datacatalog import GetEntryRequest +from .types.datacatalog import GetTagTemplateRequest +from .types.datacatalog import ImportEntriesMetadata +from .types.datacatalog import ImportEntriesRequest +from .types.datacatalog import ImportEntriesResponse +from .types.datacatalog import ListEntriesRequest +from .types.datacatalog import ListEntriesResponse +from .types.datacatalog import ListEntryGroupsRequest +from .types.datacatalog import ListEntryGroupsResponse +from .types.datacatalog import ListTagsRequest +from .types.datacatalog import ListTagsResponse +from .types.datacatalog import LookerSystemSpec +from .types.datacatalog import LookupEntryRequest +from .types.datacatalog import ModelSpec +from .types.datacatalog import ModifyEntryContactsRequest +from .types.datacatalog import ModifyEntryOverviewRequest +from .types.datacatalog import ReconcileTagsMetadata +from .types.datacatalog import ReconcileTagsRequest +from .types.datacatalog import ReconcileTagsResponse +from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest +from .types.datacatalog import RenameTagTemplateFieldRequest +from .types.datacatalog import RoutineSpec +from .types.datacatalog import SearchCatalogRequest +from .types.datacatalog import SearchCatalogResponse +from .types.datacatalog import ServiceSpec +from .types.datacatalog import SqlDatabaseSystemSpec +from .types.datacatalog import StarEntryRequest +from .types.datacatalog import StarEntryResponse +from .types.datacatalog import UnstarEntryRequest +from .types.datacatalog import UnstarEntryResponse +from .types.datacatalog import UpdateEntryGroupRequest +from .types.datacatalog import UpdateEntryRequest +from .types.datacatalog import UpdateTagRequest +from .types.datacatalog import UpdateTagTemplateFieldRequest +from .types.datacatalog import UpdateTagTemplateRequest +from .types.datacatalog import VertexDatasetSpec +from .types.datacatalog import VertexModelSourceInfo +from .types.datacatalog import VertexModelSpec +from .types.datacatalog import EntryType +from .types.dataplex_spec import DataplexExternalTable +from .types.dataplex_spec import DataplexFilesetSpec +from .types.dataplex_spec import DataplexSpec +from .types.dataplex_spec import DataplexTableSpec +from .types.dump_content import DumpItem +from .types.dump_content import TaggedEntry +from .types.gcs_fileset_spec import GcsFilesetSpec +from .types.gcs_fileset_spec import GcsFileSpec +from .types.physical_schema import PhysicalSchema +from .types.policytagmanager import CreatePolicyTagRequest +from .types.policytagmanager import CreateTaxonomyRequest +from .types.policytagmanager import DeletePolicyTagRequest +from .types.policytagmanager import DeleteTaxonomyRequest +from .types.policytagmanager import GetPolicyTagRequest +from .types.policytagmanager import GetTaxonomyRequest +from .types.policytagmanager import ListPolicyTagsRequest +from .types.policytagmanager import ListPolicyTagsResponse +from .types.policytagmanager import ListTaxonomiesRequest +from .types.policytagmanager import ListTaxonomiesResponse +from .types.policytagmanager import PolicyTag +from .types.policytagmanager import Taxonomy +from .types.policytagmanager import UpdatePolicyTagRequest +from .types.policytagmanager import UpdateTaxonomyRequest +from .types.policytagmanagerserialization import CrossRegionalSource +from .types.policytagmanagerserialization import ExportTaxonomiesRequest +from 
.types.policytagmanagerserialization import ExportTaxonomiesResponse +from .types.policytagmanagerserialization import ImportTaxonomiesRequest +from .types.policytagmanagerserialization import ImportTaxonomiesResponse +from .types.policytagmanagerserialization import InlineSource +from .types.policytagmanagerserialization import ReplaceTaxonomyRequest +from .types.policytagmanagerserialization import SerializedPolicyTag +from .types.policytagmanagerserialization import SerializedTaxonomy +from .types.schema import ColumnSchema +from .types.schema import Schema +from .types.search import SearchCatalogResult +from .types.search import SearchResultType +from .types.table_spec import BigQueryDateShardedSpec +from .types.table_spec import BigQueryTableSpec +from .types.table_spec import TableSpec +from .types.table_spec import ViewSpec +from .types.table_spec import TableSourceType +from .types.tags import FieldType +from .types.tags import Tag +from .types.tags import TagField +from .types.tags import TagTemplate +from .types.tags import TagTemplateField +from .types.timestamps import SystemTimestamps +from .types.usage import CommonUsageStats +from .types.usage import UsageSignal +from .types.usage import UsageStats + +__all__ = ( + 'DataCatalogAsyncClient', + 'PolicyTagManagerAsyncClient', + 'PolicyTagManagerSerializationAsyncClient', +'BigQueryConnectionSpec', +'BigQueryDateShardedSpec', +'BigQueryRoutineSpec', +'BigQueryTableSpec', +'BusinessContext', +'CloudBigtableInstanceSpec', +'CloudBigtableSystemSpec', +'CloudSqlBigQueryConnectionSpec', +'ColumnSchema', +'CommonUsageStats', +'Contacts', +'CreateEntryGroupRequest', +'CreateEntryRequest', +'CreatePolicyTagRequest', +'CreateTagRequest', +'CreateTagTemplateFieldRequest', +'CreateTagTemplateRequest', +'CreateTaxonomyRequest', +'CrossRegionalSource', +'DataCatalogClient', +'DataSource', +'DataSourceConnectionSpec', +'DatabaseTableSpec', +'DataplexExternalTable', +'DataplexFilesetSpec', +'DataplexSpec', +'DataplexTableSpec', +'DatasetSpec', +'DeleteEntryGroupRequest', +'DeleteEntryRequest', +'DeletePolicyTagRequest', +'DeleteTagRequest', +'DeleteTagTemplateFieldRequest', +'DeleteTagTemplateRequest', +'DeleteTaxonomyRequest', +'DumpItem', +'Entry', +'EntryGroup', +'EntryOverview', +'EntryType', +'ExportTaxonomiesRequest', +'ExportTaxonomiesResponse', +'FieldType', +'FilesetSpec', +'GcsFileSpec', +'GcsFilesetSpec', +'GetEntryGroupRequest', +'GetEntryRequest', +'GetPolicyTagRequest', +'GetTagTemplateRequest', +'GetTaxonomyRequest', +'ImportEntriesMetadata', +'ImportEntriesRequest', +'ImportEntriesResponse', +'ImportTaxonomiesRequest', +'ImportTaxonomiesResponse', +'InlineSource', +'IntegratedSystem', +'ListEntriesRequest', +'ListEntriesResponse', +'ListEntryGroupsRequest', +'ListEntryGroupsResponse', +'ListPolicyTagsRequest', +'ListPolicyTagsResponse', +'ListTagsRequest', +'ListTagsResponse', +'ListTaxonomiesRequest', +'ListTaxonomiesResponse', +'LookerSystemSpec', +'LookupEntryRequest', +'ManagingSystem', +'ModelSpec', +'ModifyEntryContactsRequest', +'ModifyEntryOverviewRequest', +'PersonalDetails', +'PhysicalSchema', +'PolicyTag', +'PolicyTagManagerClient', +'PolicyTagManagerSerializationClient', +'ReconcileTagsMetadata', +'ReconcileTagsRequest', +'ReconcileTagsResponse', +'RenameTagTemplateFieldEnumValueRequest', +'RenameTagTemplateFieldRequest', +'ReplaceTaxonomyRequest', +'RoutineSpec', +'Schema', +'SearchCatalogRequest', +'SearchCatalogResponse', +'SearchCatalogResult', +'SearchResultType', +'SerializedPolicyTag', +'SerializedTaxonomy', 
+'ServiceSpec', +'SqlDatabaseSystemSpec', +'StarEntryRequest', +'StarEntryResponse', +'StorageProperties', +'SystemTimestamps', +'TableSourceType', +'TableSpec', +'Tag', +'TagField', +'TagTemplate', +'TagTemplateField', +'TaggedEntry', +'Taxonomy', +'UnstarEntryRequest', +'UnstarEntryResponse', +'UpdateEntryGroupRequest', +'UpdateEntryRequest', +'UpdatePolicyTagRequest', +'UpdateTagRequest', +'UpdateTagTemplateFieldRequest', +'UpdateTagTemplateRequest', +'UpdateTaxonomyRequest', +'UsageSignal', +'UsageStats', +'VertexDatasetSpec', +'VertexModelSourceInfo', +'VertexModelSpec', +'ViewSpec', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json new file mode 100644 index 000000000000..447d15595848 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json @@ -0,0 +1,551 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datacatalog_v1", + "protoPackage": "google.cloud.datacatalog.v1", + "schema": "1.0", + "services": { + "DataCatalog": { + "clients": { + "grpc": { + "libraryClient": "DataCatalogClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ImportEntries": { + "methods": [ + "import_entries" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "ModifyEntryContacts": { + "methods": [ + "modify_entry_contacts" + ] + }, + "ModifyEntryOverview": { + "methods": [ + "modify_entry_overview" + ] + }, + "ReconcileTags": { + "methods": [ + "reconcile_tags" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "StarEntry": { + "methods": [ + "star_entry" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UnstarEntry": { + "methods": [ + "unstar_entry" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + 
"update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataCatalogAsyncClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ImportEntries": { + "methods": [ + "import_entries" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "ModifyEntryContacts": { + "methods": [ + "modify_entry_contacts" + ] + }, + "ModifyEntryOverview": { + "methods": [ + "modify_entry_overview" + ] + }, + "ReconcileTags": { + "methods": [ + "reconcile_tags" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "StarEntry": { + "methods": [ + "star_entry" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UnstarEntry": { + "methods": [ + "unstar_entry" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + } + } + }, + "PolicyTagManager": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"PolicyTagManagerAsyncClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + } + } + }, + "PolicyTagManagerSerialization": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerSerializationClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + }, + "ReplaceTaxonomy": { + "methods": [ + "replace_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerSerializationAsyncClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + }, + "ReplaceTaxonomy": { + "methods": [ + "replace_taxonomy" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py new file mode 100644 index 000000000000..360a0d13ebdd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed new file mode 100644 index 000000000000..bb4088a3c198 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datacatalog package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py new file mode 100644 index 000000000000..e703e914bb2c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataCatalogClient +from .async_client import DataCatalogAsyncClient + +__all__ = ( + 'DataCatalogClient', + 'DataCatalogAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py new file mode 100644 index 000000000000..1f4c84524fc9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -0,0 +1,4510 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.datacatalog_v1.services.data_catalog import pagers +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1.types import schema +from google.cloud.datacatalog_v1.types import search +from google.cloud.datacatalog_v1.types import table_spec +from google.cloud.datacatalog_v1.types import tags +from google.cloud.datacatalog_v1.types import timestamps +from google.cloud.datacatalog_v1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport +from .client import DataCatalogClient + + +class DataCatalogAsyncClient: + """Data Catalog API service allows you to discover, understand, + and manage your data. 
+ """ + + _client: DataCatalogClient + + DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT + + entry_path = staticmethod(DataCatalogClient.entry_path) + parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) + entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) + tag_path = staticmethod(DataCatalogClient.tag_path) + parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) + tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + parse_tag_template_field_path = staticmethod(DataCatalogClient.parse_tag_template_field_path) + tag_template_field_enum_value_path = staticmethod(DataCatalogClient.tag_template_field_enum_value_path) + parse_tag_template_field_enum_value_path = staticmethod(DataCatalogClient.parse_tag_template_field_enum_value_path) + common_billing_account_path = staticmethod(DataCatalogClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataCatalogClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataCatalogClient.common_folder_path) + parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) + common_organization_path = staticmethod(DataCatalogClient.common_organization_path) + parse_common_organization_path = staticmethod(DataCatalogClient.parse_common_organization_path) + common_project_path = staticmethod(DataCatalogClient.common_project_path) + parse_common_project_path = staticmethod(DataCatalogClient.parse_common_project_path) + common_location_path = staticmethod(DataCatalogClient.common_location_path) + parse_common_location_path = staticmethod(DataCatalogClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataCatalogAsyncClient: The constructed client. + """ + return DataCatalogClient.from_service_account_info.__func__(DataCatalogAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataCatalogAsyncClient: The constructed client. + """ + return DataCatalogClient.from_service_account_file.__func__(DataCatalogAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataCatalogClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataCatalogTransport: + """Returns the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DataCatalogClient).get_transport_class, type(DataCatalogClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DataCatalogTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data catalog client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataCatalogTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
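The constructor options described here mirror the synchronous DataCatalogClient. A minimal construction sketch with explicit credentials and an api_endpoint override; the key file path and endpoint value are placeholders, not values taken from this change:

# Illustrative only; the key file path and endpoint are placeholders.
from google.api_core.client_options import ClientOptions
from google.oauth2 import service_account

from google.cloud import datacatalog_v1

credentials = service_account.Credentials.from_service_account_file(
    "service-account.json"  # placeholder path
)
client = datacatalog_v1.DataCatalogAsyncClient(
    credentials=credentials,
    client_options=ClientOptions(api_endpoint="datacatalog.googleapis.com"),
)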
+ """ + self._client = DataCatalogClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def search_catalog(self, + request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, + *, + scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCatalogAsyncPager: + r"""Searches Data Catalog for multiple resources like entries and + tags that match a query. + + This is a [Custom Method] + (https://cloud.google.com/apis/design/custom_methods) that + doesn't return all information on a resource, only its ID and + high level fields. To get more information, you can subsequently + call specific get methods. + + Note: Data Catalog search queries don't guarantee full recall. + Results that match your query might not be returned, even in + subsequent result pages. Additionally, returned (and not + returned) results can vary if you repeat search queries. + + For more information, see [Data Catalog search syntax] + (https://cloud.google.com/data-catalog/docs/how-to/search-reference). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_search_catalog(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.SearchCatalogRequest, dict]]): + The request object. Request message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + scope (:class:`google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope`): + Required. The scope of this search request. + + The ``scope`` is invalid if ``include_org_ids``, + ``include_project_ids`` are empty AND + ``include_gcp_public_datasets`` is set to ``false``. In + this case, the request returns an error. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Optional. The query string with a minimum of 3 + characters and specific syntax. For more information, + see `Data Catalog search + syntax `__. + + An empty query string returns all data assets (in the + specified scope) that you have access to. + + A query string can be a simple ``xyz`` or qualified by + predicates: + + - ``name:x`` + - ``column:y`` + - ``description:z`` + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogAsyncPager: + Response message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.SearchCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchCatalogAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry_group(self, + request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group_id: Optional[str] = None, + entry_group: Optional[datacatalog.EntryGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Creates an entry group. + + An entry group contains logically related entries together with + `Cloud Identity and Access + Management `__ policies. These + policies specify users who can create, edit, and view entries + within entry groups. + + Data Catalog automatically creates entry groups with names that + start with the ``@`` symbol for the following resources: + + - BigQuery entries (``@bigquery``) + - Pub/Sub topics (``@pubsub``) + - Dataproc Metastore services + (``@dataproc_metastore_{SERVICE_NAME_HASH}``) + + You can create your own entry groups for Cloud Storage fileset + entries and custom entries together with the corresponding IAM + policies. User-created entry groups can't contain the ``@`` + symbol, it is reserved for automatically created groups. + + Entry groups, like entries, can be searched. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
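As a usage sketch for the search_catalog coroutine defined above: the flattened scope and query arguments can be passed instead of a full SearchCatalogRequest, and the returned SearchCatalogAsyncPager resolves additional pages during async iteration. The project ID below is a placeholder:

# Illustrative only; "my-project" is a placeholder project ID.
import asyncio

from google.cloud import datacatalog_v1


async def find_bigquery_assets():
    client = datacatalog_v1.DataCatalogAsyncClient()
    scope = datacatalog_v1.SearchCatalogRequest.Scope(
        include_project_ids=["my-project"],
    )
    # Flattened arguments; do not combine these with a `request=` object.
    pager = await client.search_catalog(scope=scope, query="system=bigquery")
    async for result in pager:
        print(result.relative_resource_name)


asyncio.run(find_bigquery_assets())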
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = await client.create_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateEntryGroupRequest, dict]]): + The request object. Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. + parent (:class:`str`): + Required. The names of the project + and location that the new entry group + belongs to. + + Note: The entry group itself and its + child resources might not be stored in + the location specified in its name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (:class:`str`): + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. + + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (:class:`google.cloud.datacatalog_v1.types.EntryGroup`): + The entry group to create. Defaults + to empty. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group_id is not None: + request.entry_group_id = entry_group_id + if entry_group is not None: + request.entry_group = entry_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_group(self, + request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + read_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Gets an entry group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_get_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.GetEntryGroupRequest, dict]]): + The request object. Request message for + [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. + name (:class:`str`): + Required. The name of the entry group + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to return. If empty or + omitted, all fields are returned. + + This corresponds to the ``read_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if read_mask is not None: + request.read_mask = read_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
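A hedged sketch of calling the create_entry_group coroutine above with flattened arguments; the routing header for parent is attached automatically inside the method, so a caller only supplies the fields. Project, location, and entry group ID are placeholders:

# Illustrative only; project, location, and entry group ID are placeholders.
from google.cloud import datacatalog_v1


async def make_entry_group(client: datacatalog_v1.DataCatalogAsyncClient):
    entry_group = await client.create_entry_group(
        parent="projects/my-project/locations/us-central1",
        entry_group_id="my_entry_group",
        entry_group=datacatalog_v1.EntryGroup(
            display_name="My entry group",
            description="Fileset entries for the analytics team.",
        ),
    )
    print(entry_group.name)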
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entry_group(self, + request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[datacatalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Updates an entry group. + + You must enable the Data Catalog API in the project identified + by the ``entry_group.name`` parameter. For more information, see + `Data Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateEntryGroupRequest( + ) + + # Make the request + response = await client.update_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest, dict]]): + The request object. Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. + entry_group (:class:`google.cloud.datacatalog_v1.types.EntryGroup`): + Required. Updates for the entry group. The ``name`` + field must be set. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. 
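The update_mask semantics described above (an absent or empty mask overwrites all modifiable fields) suggest passing an explicit FieldMask when only one field should change. A minimal sketch, with a placeholder entry group name:

# Illustrative only; the entry group name is a placeholder.
from google.protobuf import field_mask_pb2

from google.cloud import datacatalog_v1


async def update_entry_group_description(client: datacatalog_v1.DataCatalogAsyncClient):
    entry_group = datacatalog_v1.EntryGroup(
        name="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
        description="Updated description only.",
    )
    updated = await client.update_entry_group(
        entry_group=entry_group,
        # Overwrite only `description`; other fields keep their current values.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(updated.description)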
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry_group(self, + request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an entry group. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry_group(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest, dict]]): + The request object. Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. + name (:class:`str`): + Required. The name of the entry group + to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_entry_groups(self, + request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsAsyncPager: + r"""Lists entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ListEntryGroupsRequest, dict]]): + The request object. Request message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + parent (:class:`str`): + Required. The name of the location + that contains the entry groups to list. + Can be provided as a URL. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsAsyncPager: + Response message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListEntryGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_entry_groups, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry(self, + request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_id: Optional[str] = None, + entry: Optional[datacatalog.Entry] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Creates an entry. + + You can create entries only with 'FILESET', 'CLUSTER', + 'DATA_STREAM', or custom types. Data Catalog automatically + creates entries with other types during metadata ingestion from + integrated systems. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + An entry group can have a maximum of 100,000 entries. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateEntryRequest, dict]]): + The request object. Request message for + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. + parent (:class:`str`): + Required. The name of the entry group + this entry belongs to. 
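Given the restriction described in the create_entry documentation above (only FILESET, CLUSTER, DATA_STREAM, or custom-typed entries can be created directly), a hedged sketch of creating a Cloud Storage fileset entry; the entry group path, entry ID, and bucket are placeholders:

# Illustrative only; entry group path, entry ID, and bucket are placeholders.
from google.cloud import datacatalog_v1


async def make_fileset_entry(client: datacatalog_v1.DataCatalogAsyncClient):
    entry = datacatalog_v1.Entry()
    entry.display_name = "Daily CSV drops"
    entry.type_ = datacatalog_v1.EntryType.FILESET
    entry.gcs_fileset_spec.file_patterns = ["gs://my-bucket/daily/*.csv"]

    created = await client.create_entry(
        parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
        entry_id="daily_csv_fileset",
        entry=entry,
    )
    print(created.name)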
+ Note: The entry itself and its child + resources might not be stored in the + location specified in its name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_id (:class:`str`): + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (:class:`google.cloud.datacatalog_v1.types.Entry`): + Required. The entry to create. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_id is not None: + request.entry_id = entry_id + if entry is not None: + request.entry = entry + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entry(self, + request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[datacatalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Updates an existing entry. + + You must enable the Data Catalog API in the project identified + by the ``entry.name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateEntryRequest, dict]]): + The request object. Request message for + [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. + entry (:class:`google.cloud.datacatalog_v1.types.Entry`): + Required. Updates for the entry. The ``name`` field must + be set. + + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. + + You can modify only the fields listed below. + + For entries with type ``DATA_STREAM``: + + - ``schema`` + + For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
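Following the list of modifiable fields above, a sketch of using update_entry with an explicit mask to change only the description of a fileset entry; the entry name is a placeholder:

# Illustrative only; the entry name is a placeholder.
from google.protobuf import field_mask_pb2

from google.cloud import datacatalog_v1


async def update_fileset_description(client: datacatalog_v1.DataCatalogAsyncClient):
    entry = datacatalog_v1.Entry(
        name=(
            "projects/my-project/locations/us-central1/"
            "entryGroups/my_entry_group/entries/daily_csv_fileset"
        ),
        description="Refreshed nightly from the ingestion pipeline.",
    )
    updated = await client.update_entry(
        entry=entry,
        # `description` is listed above as modifiable for FILESET entries.
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(updated.description)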
+ has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry(self, + request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing entry. + + You can delete only the entries created by the + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] + method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteEntryRequest, dict]]): + The request object. Request message for + [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. + name (:class:`str`): + Required. The name of the entry to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_entry(self, + request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_get_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.GetEntryRequest, dict]]): + The request object. Request message for + [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. + name (:class:`str`): + Required. The name of the entry to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def lookup_entry(self, + request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry by its target resource name. + + The resource name comes from the source Google Cloud + Platform service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_lookup_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.LookupEntryRequest, dict]]): + The request object. Request message for + [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + request = datacatalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
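As a usage sketch for lookup_entry above: the request identifies an entry by its source resource rather than by a Data Catalog name. The linked_resource value below follows the general full-resource-name convention, and the project, dataset, and table IDs are placeholders:

# Illustrative only; the BigQuery table reference is a placeholder.
from google.cloud import datacatalog_v1


async def lookup_bigquery_table(client: datacatalog_v1.DataCatalogAsyncClient):
    request = datacatalog_v1.LookupEntryRequest(
        linked_resource=(
            "//bigquery.googleapis.com/projects/my-project/"
            "datasets/my_dataset/tables/my_table"
        ),
    )
    entry = await client.lookup_entry(request=request)
    print(entry.name, entry.type_)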
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lookup_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_entries(self, + request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntriesAsyncPager: + r"""Lists entries. + + Note: Currently, this method can list only custom entries. To + get a list of both custom and automatically created entries, use + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_list_entries(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ListEntriesRequest, dict]]): + The request object. Request message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + parent (:class:`str`): + Required. The name of the entry group + that contains the entries to list. + Can be provided in URL format. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesAsyncPager: + Response message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
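+ # Illustrative note: this is a paged RPC; the raw ListEntriesResponse is
+ # wrapped in a ListEntriesAsyncPager further below. Besides iterating
+ # individual entries with ``async for``, whole pages can be consumed via
+ # the pager's ``pages`` attribute, e.g. (sketch):
+ #   async for page in page_result.pages:
+ #       print(len(page.entries))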
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def modify_entry_overview(self, + request: Optional[Union[datacatalog.ModifyEntryOverviewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryOverview: + r"""Modifies entry overview, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateOverview`` IAM permission on the + corresponding project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_modify_entry_overview(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryOverviewRequest( + name="name_value", + ) + + # Make the request + response = await client.modify_entry_overview(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest, dict]]): + The request object. Request message for + [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryOverview: + Entry overview fields for rich text + descriptions of entries. + + """ + # Create or coerce a protobuf request object. + request = datacatalog.ModifyEntryOverviewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_entry_overview, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def modify_entry_contacts(self, + request: Optional[Union[datacatalog.ModifyEntryContactsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Contacts: + r"""Modifies contacts, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateContacts`` IAM permission on the + corresponding project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_modify_entry_contacts(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryContactsRequest( + name="name_value", + ) + + # Make the request + response = await client.modify_entry_contacts(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest, dict]]): + The request object. Request message for + [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Contacts: + Contact people for the entry. + """ + # Create or coerce a protobuf request object. + request = datacatalog.ModifyEntryContactsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_entry_contacts, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tag_template(self, + request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag_template_id: Optional[str] = None, + tag_template: Optional[tags.TagTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Creates a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = await client.create_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagTemplateRequest, dict]]): + The request object. Request message for + [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. + parent (:class:`str`): + Required. The name of the project and the template + location + `region `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_id (:class:`str`): + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. + + This corresponds to the ``tag_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template (:class:`google.cloud.datacatalog_v1.types.TagTemplate`): + Required. The tag template to create. + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
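+ # Illustrative note: the flattened ``tag_template`` argument takes a
+ # TagTemplate message; a minimal one might be built like this (sketch,
+ # display name and field ID are placeholders):
+ #   tag_template = datacatalog_v1.TagTemplate(
+ #       display_name="Demo template",
+ #       fields={
+ #           "source": datacatalog_v1.TagTemplateField(
+ #               type_=datacatalog_v1.FieldType(
+ #                   primitive_type=datacatalog_v1.FieldType.PrimitiveType.STRING,
+ #               ),
+ #           ),
+ #       },
+ #   )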
+ if parent is not None: + request.parent = parent + if tag_template_id is not None: + request.tag_template_id = tag_template_id + if tag_template is not None: + request.tag_template = tag_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tag_template(self, + request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Gets a tag template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_get_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.GetTagTemplateRequest, dict]]): + The request object. Request message for + [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. + name (:class:`str`): + Required. The name of the tag + template to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
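+ # Illustrative note: tag template names follow the pattern
+ # ``projects/{project}/locations/{location}/tagTemplates/{tag_template}``;
+ # the generated ``tag_template_path`` helper can build one (sketch,
+ # placeholder IDs):
+ #   name = client.tag_template_path("my-project", "us-central1", "my_template")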
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag_template(self, + request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, + *, + tag_template: Optional[tags.TagTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Updates a tag template. + + You can't update template fields with this method. These fields + are separate resources with their own create, update, and delete + methods. + + You must enable the Data Catalog API in the project identified + by the ``tag_template.name`` parameter. For more information, + see `Data Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTagTemplateRequest( + ) + + # Make the request + response = await client.update_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest, dict]]): + The request object. Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. + tag_template (:class:`google.cloud.datacatalog_v1.types.TagTemplate`): + Required. The template to update. The ``name`` field + must be set. + + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` and + ``is_publicly_readable`` can be overwritten. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. 
+ + Note: Updating the ``is_publicly_readable`` field may + require up to 12 hours to take effect in search results. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag_template is not None: + request.tag_template = tag_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag_template.name", request.tag_template.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag_template(self, + request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag template and all tags that use it. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest, dict]]): + The request object. Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. + name (:class:`str`): + Required. The name of the tag + template to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + Required. If true, deletes all tags that use this + template. + + Currently, ``true`` is the only supported value. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_tag_template_field(self, + request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag_template_field_id: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Creates a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.create_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest, dict]]): + The request object. Request message for + [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. + parent (:class:`str`): + Required. The name of the project and the template + location + `region `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field_id (:class:`str`): + Required. The ID of the tag template field to create. + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and + lowercase), numbers (0-9), underscores (_) and dashes + (-). Field IDs must be at least 1 character long and at + most 128 characters long. Field IDs must also be unique + within their template. + + This corresponds to the ``tag_template_field_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (:class:`google.cloud.datacatalog_v1.types.TagTemplateField`): + Required. The tag template field to + create. + + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
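+ # Illustrative note: an enum-typed field might be built like this (sketch,
+ # display names are placeholders):
+ #   tag_template_field = datacatalog_v1.TagTemplateField(
+ #       type_=datacatalog_v1.FieldType(
+ #           enum_type=datacatalog_v1.FieldType.EnumType(
+ #               allowed_values=[
+ #                   datacatalog_v1.FieldType.EnumType.EnumValue(display_name="PII"),
+ #               ],
+ #           ),
+ #       ),
+ #   )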
+ if parent is not None: + request.parent = parent + if tag_template_field_id is not None: + request.tag_template_field_id = tag_template_field_id + if tag_template_field is not None: + request.tag_template_field = tag_template_field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag_template_field(self, + request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Updates a field in a tag template. + + You can't update the field type with this method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.update_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest, dict]]): + The request object. Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. + name (:class:`str`): + Required. The name of the tag + template field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (:class:`google.cloud.datacatalog_v1.types.TagTemplateField`): + Required. The template to update. + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. 
The following + fields are modifiable: + + - ``display_name`` + - ``type.enum_type`` + - ``is_required`` + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. + + Additionally, updating a template field from optional to + required is *not* allowed. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, tag_template_field, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if tag_template_field is not None: + request.tag_template_field = tag_template_field + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rename_tag_template_field(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + new_tag_template_field_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = await client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest, dict]]): + The request object. Request message for + [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. + name (:class:`str`): + Required. The name of the tag + template field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_tag_template_field_id (:class:`str`): + Required. The new ID of this tag template field. For + example, ``my_new_field``. + + This corresponds to the ``new_tag_template_field_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.RenameTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_tag_template_field_id is not None: + request.new_tag_template_field_id = new_tag_template_field_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def rename_tag_template_field_enum_value(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, + *, + name: Optional[str] = None, + new_enum_value_display_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. + + Within a single enum field, enum values must be unique. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = await client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest, dict]]): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (:class:`str`): + Required. The name of the enum field + value. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (:class:`str`): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
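+ # Illustrative note: the ``name`` of an enum value is expected to follow the
+ # pattern ``projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}/enumValues/{enum_value_display_name}``,
+ # where the final segment is the value's current display name.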
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag_template_field(self, + request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a field in a tag template and all uses of this field + from the tags based on this template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template_field(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest, dict]]): + The request object. Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. + name (:class:`str`): + Required. The name of the tag + template field to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + Required. If true, deletes this field from any tags that + use it. + + Currently, ``true`` is the only supported value. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
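+ # Illustrative note: tag template field names follow the pattern
+ # ``projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}``;
+ # the generated ``tag_template_field_path`` helper can build one (sketch,
+ # placeholder IDs):
+ #   name = client.tag_template_field_path("my-project", "us-central1", "my_template", "source")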
+ if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_tag(self, + request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag: Optional[tags.Tag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Creates a tag and assigns it to: + + - An [Entry][google.cloud.datacatalog.v1.Entry] if the method + name is + ``projects.locations.entryGroups.entries.tags.create``. + - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the + method name is + ``projects.locations.entryGroups.tags.create``. + + Note: The project identified by the ``parent`` parameter for the + [tag] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) + and the [tag template] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) + used to create the tag must be in the same organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = await client.create_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagRequest, dict]]): + The request object. Request message for + [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. + parent (:class:`str`): + Required. The name of the resource to + attach this tag to. + Tags can be attached to entries or entry + groups. An entry can have up to 1000 + attached tags. + + Note: The tag and its child resources + might not be stored in the location + specified in its name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag (:class:`google.cloud.datacatalog_v1.types.Tag`): + Required. The tag to create. + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Tag: + Tags contain custom metadata and are attached to Data Catalog resources. Tags + conform with the specification of their tag template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag is not None: + request.tag = tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag(self, + request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, + *, + tag: Optional[tags.Tag] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Updates an existing tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = await client.update_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagRequest, dict]]): + The request object. Request message for + [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. + tag (:class:`google.cloud.datacatalog_v1.types.Tag`): + Required. The updated tag. The "name" + field must be set. 
+ + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Tag: + Tags contain custom metadata and are attached to Data Catalog resources. Tags + conform with the specification of their tag template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag is not None: + request.tag = tag + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag.name", request.tag.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag(self, + request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_tag(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagRequest, dict]]): + The request object. Request message for + [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. + name (:class:`str`): + Required. The name of the tag to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_tags(self, + request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTagsAsyncPager: + r"""Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1.Entry]. The + [columns][google.cloud.datacatalog.v1.Tag.column] in the + response are lowercased. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_list_tags(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ListTagsRequest, dict]]): + The request object. Request message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + parent (:class:`str`): + Required. The name of the Data Catalog resource to list + the tags of. + + The resource can be an + [Entry][google.cloud.datacatalog.v1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] + (without ``/entries/{entries}`` at the end). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsAsyncPager: + Response message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reconcile_tags(self, + request: Optional[Union[datacatalog.ReconcileTagsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""``ReconcileTags`` creates or updates a list of tags on the + entry. 
If the + [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] + parameter is set, the operation deletes tags not included in the + input tag list. + + ``ReconcileTags`` returns a [long-running operation] + [google.longrunning.Operation] resource that can be queried with + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + to return [ReconcileTagsMetadata] + [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a + [ReconcileTagsResponse] + [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_reconcile_tags(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ReconcileTagsRequest( + parent="parent_value", + tag_template="tag_template_value", + ) + + # Make the request + operation = client.reconcile_tags(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ReconcileTagsRequest, dict]]): + The request object. Request message for + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ReconcileTagsResponse` [Long-running operation][google.longrunning.Operation] + response message returned by + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + + """ + # Create or coerce a protobuf request object. + request = datacatalog.ReconcileTagsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reconcile_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datacatalog.ReconcileTagsResponse, + metadata_type=datacatalog.ReconcileTagsMetadata, + ) + + # Done; return the response. 
+ return response + + async def star_entry(self, + request: Optional[Union[datacatalog.StarEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.StarEntryResponse: + r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred + by the current user. Starring information is private to each + user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_star_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.StarEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.star_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.StarEntryRequest, dict]]): + The request object. Request message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + name (:class:`str`): + Required. The name of the entry to + mark as starred. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.StarEntryResponse: + Response message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + Empty for now + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.StarEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.star_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def unstar_entry(self, + request: Optional[Union[datacatalog.UnstarEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.UnstarEntryResponse: + r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT + starred by the current user. Starring information is private to + each user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_unstar_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UnstarEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.unstar_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UnstarEntryRequest, dict]]): + The request object. Request message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + name (:class:`str`): + Required. The name of the entry to mark as **not** + starred. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.UnstarEntryResponse: + Response message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + Empty for now + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UnstarEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.unstar_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method sets policies only within Data Catalog and + can't be used to manage policies in BigQuery, Pub/Sub, Dataproc + Metastore, and any external Google Cloud Platform resources + synced with the Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. + + May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method doesn't get policies from Google Cloud + Platform resources ingested into Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Gets your permissions on a resource. + + Returns an empty set of permissions if the resource + doesn't exist. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method gets policies only within Data Catalog + and can't be used to get policies from BigQuery, + Pub/Sub, Dataproc Metastore, and any external Google + Cloud Platform resources ingested into Data Catalog. + + No Google IAM permissions are required to call this + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1
+            from google.iam.v1 import iam_policy_pb2  # type: ignore
+
+            async def sample_test_iam_permissions():
+                # Create a client
+                client = datacatalog_v1.DataCatalogAsyncClient()
+
+                # Initialize request argument(s)
+                request = iam_policy_pb2.TestIamPermissionsRequest(
+                    resource="resource_value",
+                    permissions=['permissions_value1', 'permissions_value2'],
+                )
+
+                # Make the request
+                response = await client.test_iam_permissions(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]):
+                The request object. Request message for ``TestIamPermissions`` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for TestIamPermissions method.
+        """
+        # Create or coerce a protobuf request object.
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.test_iam_permissions,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("resource", request.resource),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def import_entries(self,
+            request: Optional[Union[datacatalog.ImportEntriesRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> operation_async.AsyncOperation:
+        r"""Imports entries from a source, such as data previously dumped
+        into a Cloud Storage bucket, into Data Catalog. Import of
+        entries is a sync operation that reconciles the state of the
+        third-party system with the Data Catalog.
+
+        ``ImportEntries`` accepts source data snapshots of a third-party
+        system. Snapshot should be delivered as a .wire or
+        base64-encoded .txt file containing a sequence of Protocol
+        Buffer messages of
+        [DumpItem][google.cloud.datacatalog.v1.DumpItem] type.
+
+        ``ImportEntries`` returns a [long-running operation]
+        [google.longrunning.Operation] resource that can be queried with
+        [Operations.GetOperation][google.longrunning.Operations.GetOperation]
+        to return
+        [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata]
+        and an
+        [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse]
+        message.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_import_entries(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ImportEntriesRequest( + gcs_bucket_path="gcs_bucket_path_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ImportEntriesRequest, dict]]): + The request object. Request message for + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ImportEntriesResponse` Response message for [long-running operation][google.longrunning.Operation] + returned by the + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. + + """ + # Create or coerce a protobuf request object. + request = datacatalog.ImportEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + datacatalog.ImportEntriesResponse, + metadata_type=datacatalog.ImportEntriesMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "DataCatalogAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataCatalogAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py new file mode 100644 index 000000000000..c3a254ba06cc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -0,0 +1,4761 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.datacatalog_v1.services.data_catalog import pagers +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1.types import schema +from google.cloud.datacatalog_v1.types import search +from google.cloud.datacatalog_v1.types import table_spec +from google.cloud.datacatalog_v1.types import tags +from google.cloud.datacatalog_v1.types import timestamps +from google.cloud.datacatalog_v1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataCatalogGrpcTransport +from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport + + +class DataCatalogClientMeta(type): + """Metaclass for the DataCatalog client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] + _transport_registry["grpc"] = DataCatalogGrpcTransport + _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[DataCatalogTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataCatalogClient(metaclass=DataCatalogClientMeta):
+    """Data Catalog API service allows you to discover, understand,
+    and manage your data.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataCatalogClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataCatalogClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DataCatalogTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataCatalogTransport: The transport used by the client
+            instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str:
+        """Returns a fully-qualified entry string."""
+        return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, )
+
+    @staticmethod
+    def parse_entry_path(path: str) -> Dict[str,str]:
+        """Parses an entry path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)/entries/(?P<entry>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def entry_group_path(project: str,location: str,entry_group: str,) -> str:
+        """Returns a fully-qualified entry_group string."""
+        return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, )
+
+    @staticmethod
+    def parse_entry_group_path(path: str) -> Dict[str,str]:
+        """Parses an entry_group path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tag_path(project: str,location: str,entry_group: str,entry: str,tag: str,) -> str:
+        """Returns a fully-qualified tag string."""
+        return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, )
+
+    @staticmethod
+    def parse_tag_path(path: str) -> Dict[str,str]:
+        """Parses a tag path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)/entries/(?P<entry>.+?)/tags/(?P<tag>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tag_template_path(project: str,location: str,tag_template: str,) -> str:
+        """Returns a fully-qualified tag_template string."""
+        return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, )
+
+    @staticmethod
+    def parse_tag_template_path(path: str) -> Dict[str,str]:
+        """Parses a tag_template path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tag_template_field_path(project: str,location: str,tag_template: str,field: str,) -> str:
+        """Returns a fully-qualified tag_template_field string."""
+        return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, )
+
+    @staticmethod
+    def parse_tag_template_field_path(path: str) -> Dict[str,str]:
+        """Parses a tag_template_field path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<field>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tag_template_field_enum_value_path(project: str,location: str,tag_template: str,tag_template_field_id: str,enum_value_display_name: str,) -> str:
+        """Returns a fully-qualified tag_template_field_enum_value string."""
+        return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, )
+
+    @staticmethod
+    def parse_tag_template_field_enum_value_path(path: str) -> Dict[str,str]:
+        """Parses a tag_template_field_enum_value path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<tag_template_field_id>.+?)/enumValues/(?P<enum_value_display_name>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataCatalogTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data catalog client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataCatalogTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataCatalogTransport): + # transport is a DataCatalogTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search_catalog(self, + request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, + *, + scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCatalogPager: + r"""Searches Data Catalog for multiple resources like entries and + tags that match a query. + + This is a [Custom Method] + (https://cloud.google.com/apis/design/custom_methods) that + doesn't return all information on a resource, only its ID and + high level fields. To get more information, you can subsequently + call specific get methods. + + Note: Data Catalog search queries don't guarantee full recall. + Results that match your query might not be returned, even in + subsequent result pages. Additionally, returned (and not + returned) results can vary if you repeat search queries. + + For more information, see [Data Catalog search syntax] + (https://cloud.google.com/data-catalog/docs/how-to/search-reference). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_search_catalog(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.SearchCatalogRequest, dict]): + The request object. Request message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + scope (google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope): + Required. The scope of this search request. + + The ``scope`` is invalid if ``include_org_ids``, + ``include_project_ids`` are empty AND + ``include_gcp_public_datasets`` is set to ``false``. In + this case, the request returns an error. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Optional. The query string with a minimum of 3 + characters and specific syntax. For more information, + see `Data Catalog search + syntax `__. + + An empty query string returns all data assets (in the + specified scope) that you have access to. + + A query string can be a simple ``xyz`` or qualified by + predicates: + + - ``name:x`` + - ``column:y`` + - ``description:z`` + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogPager: + Response message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.SearchCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.SearchCatalogRequest): + request = datacatalog.SearchCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_catalog] + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchCatalogPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entry_group(self, + request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group_id: Optional[str] = None, + entry_group: Optional[datacatalog.EntryGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Creates an entry group. + + An entry group contains logically related entries together with + `Cloud Identity and Access + Management `__ policies. These + policies specify users who can create, edit, and view entries + within entry groups. + + Data Catalog automatically creates entry groups with names that + start with the ``@`` symbol for the following resources: + + - BigQuery entries (``@bigquery``) + - Pub/Sub topics (``@pubsub``) + - Dataproc Metastore services + (``@dataproc_metastore_{SERVICE_NAME_HASH}``) + + You can create your own entry groups for Cloud Storage fileset + entries and custom entries together with the corresponding IAM + policies. User-created entry groups can't contain the ``@`` + symbol, it is reserved for automatically created groups. + + Entry groups, like entries, can be searched. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = client.create_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreateEntryGroupRequest, dict]): + The request object. Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. + parent (str): + Required. The names of the project + and location that the new entry group + belongs to. + + Note: The entry group itself and its + child resources might not be stored in + the location specified in its name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (str): + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), underscores (_), and must start with a letter or + underscore. 
The maximum size is 64 bytes when encoded in + UTF-8. + + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (google.cloud.datacatalog_v1.types.EntryGroup): + The entry group to create. Defaults + to empty. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateEntryGroupRequest): + request = datacatalog.CreateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group_id is not None: + request.entry_group_id = entry_group_id + if entry_group is not None: + request.entry_group = entry_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_group(self, + request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + read_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Gets an entry group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_get_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.GetEntryGroupRequest, dict]): + The request object. Request message for + [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. + name (str): + Required. The name of the entry group + to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return. If empty or + omitted, all fields are returned. + + This corresponds to the ``read_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetEntryGroupRequest): + request = datacatalog.GetEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if read_mask is not None: + request.read_mask = read_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_entry_group(self, + request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[datacatalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Updates an entry group. + + You must enable the Data Catalog API in the project identified + by the ``entry_group.name`` parameter. For more information, see + `Data Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateEntryGroupRequest( + ) + + # Make the request + response = client.update_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest, dict]): + The request object. Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. + entry_group (google.cloud.datacatalog_v1.types.EntryGroup): + Required. Updates for the entry group. The ``name`` + field must be set. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryGroup: + Entry group metadata. + + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1.Entry] resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
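+        # Illustrative note (not generated code): to overwrite only selected fields,
+        # pass e.g. update_mask=field_mask_pb2.FieldMask(paths=["description"]);
+        # an absent or empty mask overwrites all modifiable fields.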
+ if not isinstance(request, datacatalog.UpdateEntryGroupRequest): + request = datacatalog.UpdateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry_group(self, + request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an entry group. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_entry_group(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest, dict]): + The request object. Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. + name (str): + Required. The name of the entry group + to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, datacatalog.DeleteEntryGroupRequest): + request = datacatalog.DeleteEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_entry_groups(self, + request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsPager: + r"""Lists entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ListEntryGroupsRequest, dict]): + The request object. Request message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + parent (str): + Required. The name of the location + that contains the entry groups to list. + Can be provided as a URL. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsPager: + Response message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ListEntryGroupsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ListEntryGroupsRequest): + request = datacatalog.ListEntryGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entry(self, + request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_id: Optional[str] = None, + entry: Optional[datacatalog.Entry] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Creates an entry. + + You can create entries only with 'FILESET', 'CLUSTER', + 'DATA_STREAM', or custom types. Data Catalog automatically + creates entries with other types during metadata ingestion from + integrated systems. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + An entry group can have a maximum of 100,000 entries. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreateEntryRequest, dict]): + The request object. Request message for + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. + parent (str): + Required. The name of the entry group + this entry belongs to. + Note: The entry itself and its child + resources might not be stored in the + location specified in its name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ entry_id (str): + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers + (0-9), and underscores (_). The maximum size is 64 bytes + when encoded in UTF-8. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (google.cloud.datacatalog_v1.types.Entry): + Required. The entry to create. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateEntryRequest): + request = datacatalog.CreateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_id is not None: + request.entry_id = entry_id + if entry is not None: + request.entry = entry + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entry(self, + request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[datacatalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Updates an existing entry. + + You must enable the Data Catalog API in the project identified + by the ``entry.name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateEntryRequest, dict]): + The request object. Request message for + [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. + entry (google.cloud.datacatalog_v1.types.Entry): + Required. Updates for the entry. The ``name`` field must + be set. + + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. + + You can modify only the fields listed below. + + For entries with type ``DATA_STREAM``: + + - ``schema`` + + For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateEntryRequest): + request = datacatalog.UpdateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry(self, + request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing entry. + + You can delete only the entries created by the + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] + method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + client.delete_entry(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteEntryRequest, dict]): + The request object. Request message for + [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. + name (str): + Required. The name of the entry to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
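+        # For example, passing either the flattened ``name`` argument or a full
+        # ``request`` object (dict or DeleteEntryRequest) is fine, but supplying
+        # both raises ValueError below.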
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteEntryRequest): + request = datacatalog.DeleteEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_entry(self, + request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_get_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.GetEntryRequest, dict]): + The request object. Request message for + [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. + name (str): + Required. The name of the entry to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetEntryRequest): + request = datacatalog.GetEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lookup_entry(self, + request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry by its target resource name. + + The resource name comes from the source Google Cloud + Platform service. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_lookup_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.LookupEntryRequest, dict]): + The request object. Request message for + [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Entry: + Entry metadata. + A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or + a Pub/Sub topic) or outside of it. You can use the + linked_resource field in the entry resource to refer + to the original resource ID of the source system. + + An entry resource contains resource details, for + example, its schema. Additionally, you can attach + flexible metadata to an entry in the form of a + [Tag][google.cloud.datacatalog.v1.Tag]. 
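+
+        For illustration only (resource names are placeholders; a sketch rather
+        than generated output), the same lookup can also be performed by SQL
+        resource name:
+
+        .. code-block:: python
+
+            from google.cloud import datacatalog_v1
+
+            client = datacatalog_v1.DataCatalogClient()
+            # Hypothetical BigQuery table identified by its SQL name.
+            request = datacatalog_v1.LookupEntryRequest(
+                sql_resource="bigquery.table.my-project.my_dataset.my_table",
+            )
+            entry = client.lookup_entry(request=request)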
+ + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.LookupEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.LookupEntryRequest): + request = datacatalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup_entry] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entries(self, + request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntriesPager: + r"""Lists entries. + + Note: Currently, this method can list only custom entries. To + get a list of both custom and automatically created entries, use + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_list_entries(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ListEntriesRequest, dict]): + The request object. Request message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + parent (str): + Required. The name of the entry group + that contains the entries to list. + Can be provided in URL format. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesPager: + Response message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
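+        # Illustrative note: besides ``parent``, ListEntriesRequest accepts paging
+        # fields such as ``page_size``; the pager returned below resolves further
+        # pages lazily as it is iterated.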
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ListEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ListEntriesRequest): + request = datacatalog.ListEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def modify_entry_overview(self, + request: Optional[Union[datacatalog.ModifyEntryOverviewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryOverview: + r"""Modifies entry overview, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateOverview`` IAM permission on the + corresponding project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_modify_entry_overview(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryOverviewRequest( + name="name_value", + ) + + # Make the request + response = client.modify_entry_overview(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest, dict]): + The request object. Request message for + [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.EntryOverview: + Entry overview fields for rich text + descriptions of entries. + + """ + # Create or coerce a protobuf request object. 
+ # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ModifyEntryOverviewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ModifyEntryOverviewRequest): + request = datacatalog.ModifyEntryOverviewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_entry_overview] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def modify_entry_contacts(self, + request: Optional[Union[datacatalog.ModifyEntryContactsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Contacts: + r"""Modifies contacts, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateContacts`` IAM permission on the + corresponding project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_modify_entry_contacts(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryContactsRequest( + name="name_value", + ) + + # Make the request + response = client.modify_entry_contacts(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest, dict]): + The request object. Request message for + [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Contacts: + Contact people for the entry. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ModifyEntryContactsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ModifyEntryContactsRequest): + request = datacatalog.ModifyEntryContactsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_entry_contacts] + + # Certain fields should be provided within the metadata header; + # add these here. 
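+        # For illustration: with ``request.name`` set to an entry resource name, the
+        # expression below appends an ("x-goog-request-params", "name=<url-encoded name>")
+        # metadata pair, which the backend uses to route the request.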
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_tag_template(self, + request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag_template_id: Optional[str] = None, + tag_template: Optional[tags.TagTemplate] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Creates a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = client.create_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreateTagTemplateRequest, dict]): + The request object. Request message for + [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. + parent (str): + Required. The name of the project and the template + location + `region `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_id (str): + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), + numbers (0-9), or underscores (_), and must start with a + letter or underscore. The maximum size is 64 bytes when + encoded in UTF-8. + + This corresponds to the ``tag_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template (google.cloud.datacatalog_v1.types.TagTemplate): + Required. The tag template to create. + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. 
For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateTagTemplateRequest): + request = datacatalog.CreateTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag_template_id is not None: + request.tag_template_id = tag_template_id + if tag_template is not None: + request.tag_template = tag_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tag_template(self, + request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Gets a tag template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_get_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.GetTagTemplateRequest, dict]): + The request object. Request message for + [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. + name (str): + Required. The name of the tag + template to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetTagTemplateRequest): + request = datacatalog.GetTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_tag_template(self, + request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, + *, + tag_template: Optional[tags.TagTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Updates a tag template. + + You can't update template fields with this method. These fields + are separate resources with their own create, update, and delete + methods. + + You must enable the Data Catalog API in the project identified + by the ``tag_template.name`` parameter. For more information, + see `Data Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTagTemplateRequest( + ) + + # Make the request + response = client.update_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest, dict]): + The request object. Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. + tag_template (google.cloud.datacatalog_v1.types.TagTemplate): + Required. The template to update. The ``name`` field + must be set. + + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` and + ``is_publicly_readable`` can be overwritten. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. + + Note: Updating the ``is_publicly_readable`` field may + require up to 12 hours to take effect in search results. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplate: + A tag template defines a tag that can have one or more + typed fields. + + The template is used to create tags that are attached to Google Cloud + resources. [Tag template roles] + + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. For example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateTagTemplateRequest): + request = datacatalog.UpdateTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
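+ # Illustrative sketch only (not generated code), with placeholder values:
+ # a caller would typically pair the template with an explicit field mask.
+ #
+ #   from google.protobuf import field_mask_pb2
+ #
+ #   template = datacatalog_v1.TagTemplate(
+ #       name="projects/my-project/locations/us-central1/tagTemplates/my_template",
+ #       display_name="Updated display name",
+ #   )
+ #   client.update_tag_template(
+ #       tag_template=template,
+ #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+ #   )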
+ if tag_template is not None: + request.tag_template = tag_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag_template.name", request.tag_template.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag_template(self, + request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag template and all tags that use it. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest, dict]): + The request object. Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. + name (str): + Required. The name of the tag + template to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + Required. If true, deletes all tags that use this + template. + + Currently, ``true`` is the only supported value. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteTagTemplateRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteTagTemplateRequest): + request = datacatalog.DeleteTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_tag_template_field(self, + request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag_template_field_id: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Creates a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.create_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest, dict]): + The request object. Request message for + [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. + parent (str): + Required. The name of the project and the template + location + `region `__. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field_id (str): + Required. The ID of the tag template field to create. + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and + lowercase), numbers (0-9), underscores (_) and dashes + (-). Field IDs must be at least 1 character long and at + most 128 characters long. Field IDs must also be unique + within their template. 
+ + This corresponds to the ``tag_template_field_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): + Required. The tag template field to + create. + + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest): + request = datacatalog.CreateTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag_template_field_id is not None: + request.tag_template_field_id = tag_template_field_id + if tag_template_field is not None: + request.tag_template_field = tag_template_field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_tag_template_field(self, + request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Updates a field in a tag template. + + You can't update the field type with this method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.update_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest, dict]): + The request object. Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. + name (str): + Required. The name of the tag + template field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): + Required. The template to update. + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: + + - ``display_name`` + - ``type.enum_type`` + - ``is_required`` + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. + + Additionally, updating a template field from optional to + required is *not* allowed. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, tag_template_field, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest): + request = datacatalog.UpdateTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
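+ # Illustrative sketch only (not generated code), with placeholder values:
+ # restricting the update to the field's display name via a field mask.
+ #
+ #   from google.protobuf import field_mask_pb2
+ #
+ #   client.update_tag_template_field(
+ #       name="projects/my-project/locations/us-central1/tagTemplates/my_template/fields/my_field",
+ #       tag_template_field=datacatalog_v1.TagTemplateField(display_name="Renamed field"),
+ #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
+ #   )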
+ if name is not None: + request.name = name + if tag_template_field is not None: + request.tag_template_field = tag_template_field + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rename_tag_template_field(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + new_tag_template_field_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest, dict]): + The request object. Request message for + [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. + name (str): + Required. The name of the tag + template field. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_tag_template_field_id (str): + Required. The new ID of this tag template field. For + example, ``my_new_field``. + + This corresponds to the ``new_tag_template_field_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. 
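+ # Illustrative sketch only (not generated code), using the flattened
+ # arguments instead of a request object (placeholder values):
+ #
+ #   client.rename_tag_template_field(
+ #       name="projects/my-project/locations/us-central1/tagTemplates/my_template/fields/old_field",
+ #       new_tag_template_field_id="my_new_field",
+ #   )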
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): + request = datacatalog.RenameTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_tag_template_field_id is not None: + request.new_tag_template_field_id = new_tag_template_field_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rename_tag_template_field_enum_value(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, + *, + name: Optional[str] = None, + new_enum_value_display_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. + + Within a single enum field, enum values must be unique. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest, dict]): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (str): + Required. The name of the enum field + value. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. 
+ + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field_enum_value] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag_template_field(self, + request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a field in a tag template and all uses of this field + from the tags based on this template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template_field(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest, dict]): + The request object. Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. + name (str): + Required. The name of the tag + template field to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + Required. If true, deletes this field from any tags that + use it. + + Currently, ``true`` is the only supported value. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): + request = datacatalog.DeleteTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_tag(self, + request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag: Optional[tags.Tag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Creates a tag and assigns it to: + + - An [Entry][google.cloud.datacatalog.v1.Entry] if the method + name is + ``projects.locations.entryGroups.entries.tags.create``. 
+ - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] if the
+ method name is
+ ``projects.locations.entryGroups.tags.create``.
+
+ Note: The project identified by the ``parent`` parameter for the
+ [tag]
+ (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters)
+ and the [tag template]
+ (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters)
+ used to create the tag must be in the same organization.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import datacatalog_v1
+
+ def sample_create_tag():
+ # Create a client
+ client = datacatalog_v1.DataCatalogClient()
+
+ # Initialize request argument(s)
+ tag = datacatalog_v1.Tag()
+ tag.column = "column_value"
+ tag.template = "template_value"
+
+ request = datacatalog_v1.CreateTagRequest(
+ parent="parent_value",
+ tag=tag,
+ )
+
+ # Make the request
+ response = client.create_tag(request=request)
+
+ # Handle the response
+ print(response)
+
+ Args:
+ request (Union[google.cloud.datacatalog_v1.types.CreateTagRequest, dict]):
+ The request object. Request message for
+ [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag].
+ parent (str):
+ Required. The name of the resource to
+ attach this tag to.
+ Tags can be attached to entries or entry
+ groups. An entry can have up to 1000
+ attached tags.
+
+ Note: The tag and its child resources
+ might not be stored in the location
+ specified in its name.
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ tag (google.cloud.datacatalog_v1.types.Tag):
+ Required. The tag to create.
+ This corresponds to the ``tag`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.datacatalog_v1.types.Tag:
+ Tags contain custom metadata and are attached to Data Catalog resources. Tags
+ conform with the specification of their tag template.
+
+ See [Data Catalog
+ IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam)
+ for information on the permissions needed to create
+ or view tags.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, tag])
+ if request is not None and has_flattened_params:
+ raise ValueError('If the `request` argument is set, then none of '
+ 'the individual field arguments should be set.')
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a datacatalog.CreateTagRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
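+ # Illustrative sketch only (not generated code), with placeholder values;
+ # assumes the referenced tag template defines a string field named "source":
+ #
+ #   tag = datacatalog_v1.Tag(
+ #       template="projects/my-project/locations/us-central1/tagTemplates/my_template",
+ #       fields={"source": datacatalog_v1.TagField(string_value="billing system")},
+ #   )
+ #   client.create_tag(parent=entry_name, tag=tag)  # entry_name: an Entry resource name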
+ if not isinstance(request, datacatalog.CreateTagRequest): + request = datacatalog.CreateTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag is not None: + request.tag = tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_tag(self, + request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, + *, + tag: Optional[tags.Tag] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Updates an existing tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_tag(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = client.update_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateTagRequest, dict]): + The request object. Request message for + [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. + tag (google.cloud.datacatalog_v1.types.Tag): + Required. The updated tag. The "name" + field must be set. + + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Tag: + Tags contain custom metadata and are attached to Data Catalog resources. Tags + conform with the specification of their tag template. 
+ + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateTagRequest): + request = datacatalog.UpdateTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag is not None: + request.tag = tag + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag.name", request.tag.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag(self, + request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_tag(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + client.delete_tag(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteTagRequest, dict]): + The request object. Request message for + [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. + name (str): + Required. The name of the tag to + delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
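+ # (Passing both ``request=`` and the flattened ``name=`` argument would be
+ # ambiguous, so the check below rejects that combination.)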
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteTagRequest): + request = datacatalog.DeleteTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_tags(self, + request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTagsPager: + r"""Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1.Entry]. The + [columns][google.cloud.datacatalog.v1.Tag.column] in the + response are lowercased. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_list_tags(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ListTagsRequest, dict]): + The request object. Request message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + parent (str): + Required. The name of the Data Catalog resource to list + the tags of. + + The resource can be an + [Entry][google.cloud.datacatalog.v1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] + (without ``/entries/{entries}`` at the end). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsPager: + Response message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ListTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ListTagsRequest): + request = datacatalog.ListTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def reconcile_tags(self, + request: Optional[Union[datacatalog.ReconcileTagsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""``ReconcileTags`` creates or updates a list of tags on the + entry. If the + [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] + parameter is set, the operation deletes tags not included in the + input tag list. + + ``ReconcileTags`` returns a [long-running operation] + [google.longrunning.Operation] resource that can be queried with + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + to return [ReconcileTagsMetadata] + [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a + [ReconcileTagsResponse] + [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_reconcile_tags(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ReconcileTagsRequest( + parent="parent_value", + tag_template="tag_template_value", + ) + + # Make the request + operation = client.reconcile_tags(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ReconcileTagsRequest, dict]): + The request object. Request message for + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ReconcileTagsResponse` [Long-running operation][google.longrunning.Operation] + response message returned by + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ReconcileTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ReconcileTagsRequest): + request = datacatalog.ReconcileTagsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reconcile_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + datacatalog.ReconcileTagsResponse, + metadata_type=datacatalog.ReconcileTagsMetadata, + ) + + # Done; return the response. + return response + + def star_entry(self, + request: Optional[Union[datacatalog.StarEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.StarEntryResponse: + r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred + by the current user. Starring information is private to each + user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_star_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.StarEntryRequest( + name="name_value", + ) + + # Make the request + response = client.star_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.StarEntryRequest, dict]): + The request object. Request message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + name (str): + Required. The name of the entry to + mark as starred. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.StarEntryResponse: + Response message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + Empty for now + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.StarEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.StarEntryRequest): + request = datacatalog.StarEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.star_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def unstar_entry(self, + request: Optional[Union[datacatalog.UnstarEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.UnstarEntryResponse: + r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT + starred by the current user. Starring information is private to + each user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_unstar_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UnstarEntryRequest( + name="name_value", + ) + + # Make the request + response = client.unstar_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UnstarEntryRequest, dict]): + The request object. Request message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + name (str): + Required. The name of the entry to mark as **not** + starred. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.UnstarEntryResponse: + Response message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + Empty for now + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UnstarEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UnstarEntryRequest): + request = datacatalog.UnstarEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.unstar_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method sets policies only within Data Catalog and + can't be used to manage policies in BigQuery, Pub/Sub, Dataproc + Metastore, and any external Google Cloud Platform resources + synced with the Data Catalog. 
+ + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. + + May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method doesn't get policies from Google Cloud + Platform resources ingested into Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Gets your permissions on a resource. + + Returns an empty set of permissions if the resource + doesn't exist. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method gets policies only within Data Catalog + and can't be used to get policies from BigQuery, + Pub/Sub, Dataproc Metastore, and any external Google + Cloud Platform resources ingested into Data Catalog. + + No Google IAM permissions are required to call this + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_entries(self, + request: Optional[Union[datacatalog.ImportEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports entries from a source, such as data previously dumped + into a Cloud Storage bucket, into Data Catalog. Import of + entries is a sync operation that reconciles the state of the + third-party system with the Data Catalog. + + ``ImportEntries`` accepts source data snapshots of a third-party + system. Snapshot should be delivered as a .wire or + base65-encoded .txt file containing a sequence of Protocol + Buffer messages of + [DumpItem][google.cloud.datacatalog.v1.DumpItem] type. + + ``ImportEntries`` returns a [long-running operation] + [google.longrunning.Operation] resource that can be queried with + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + to return + [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata] + and an + [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse] + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_import_entries(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ImportEntriesRequest( + gcs_bucket_path="gcs_bucket_path_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ImportEntriesRequest, dict]): + The request object. Request message for + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ImportEntriesResponse` Response message for [long-running operation][google.longrunning.Operation] + returned by the + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ImportEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ImportEntriesRequest): + request = datacatalog.ImportEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_entries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + datacatalog.ImportEntriesResponse, + metadata_type=datacatalog.ImportEntriesMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataCatalogClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
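+
+        A minimal sketch, assuming ``client`` is an already-constructed
+        ``DataCatalogClient``; the resource name and filter below are
+        illustrative placeholders, and the filters a given service accepts
+        may vary:
+
+        .. code-block:: python
+
+            from google.longrunning import operations_pb2
+
+            # List operations under a hypothetical location.
+            request = operations_pb2.ListOperationsRequest(
+                name="projects/my-project/locations/us-central1",
+                filter="done=true",
+            )
+            response = client.list_operations(request=request)
+            for operation in response.operations:
+                print(operation.name)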
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. 
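+
+        A minimal sketch, assuming ``client`` is an already-constructed
+        ``DataCatalogClient``; the operation name is an illustrative
+        placeholder:
+
+        .. code-block:: python
+
+            from google.longrunning import operations_pb2
+
+            # Delete the record of a hypothetical finished operation.
+            request = operations_pb2.DeleteOperationRequest(
+                name="projects/my-project/locations/us-central1/operations/1234",
+            )
+            client.delete_operation(request=request)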
+ + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataCatalogClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py new file mode 100644 index 000000000000..a9091c273cd1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py @@ -0,0 +1,504 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import search +from google.cloud.datacatalog_v1.types import tags + + +class SearchCatalogPager: + """A pager for iterating through ``search_catalog`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchCatalog`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.SearchCatalogResponse], + request: datacatalog.SearchCatalogRequest, + response: datacatalog.SearchCatalogResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.SearchCatalogRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.SearchCatalogResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
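+
+        In typical use this pager is obtained from
+        ``DataCatalogClient.search_catalog`` rather than constructed directly.
+        A minimal sketch, assuming ``client`` is an already-constructed
+        ``DataCatalogClient``; the project ID and query are illustrative
+        placeholders:
+
+        .. code-block:: python
+
+            # Search a hypothetical project and print matching resource names.
+            pager = client.search_catalog(request={
+                "scope": {"include_project_ids": ["my-project"]},
+                "query": "system=bigquery",
+            })
+            for result in pager:
+                print(result.relative_resource_name)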
+ """ + self._method = method + self._request = datacatalog.SearchCatalogRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.SearchCatalogResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[search.SearchCatalogResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchCatalogAsyncPager: + """A pager for iterating through ``search_catalog`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchCatalog`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.SearchCatalogResponse]], + request: datacatalog.SearchCatalogRequest, + response: datacatalog.SearchCatalogResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.SearchCatalogRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.SearchCatalogResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.SearchCatalogRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.SearchCatalogResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[search.SearchCatalogResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. 
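+
+    A minimal sketch of draining this pager, assuming ``client`` is an
+    already-constructed ``DataCatalogClient`` and the parent name is an
+    illustrative placeholder:
+
+    .. code-block:: python
+
+        # The pager transparently fetches additional pages as needed.
+        for entry_group in client.list_entry_groups(
+            request={"parent": "projects/my-project/locations/us-central1"}
+        ):
+            print(entry_group.name)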
+ + All the usual :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListEntryGroupsResponse], + request: datacatalog.ListEntryGroupsRequest, + response: datacatalog.ListEntryGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListEntryGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntryGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datacatalog.EntryGroup]: + for page in self.pages: + yield from page.entry_groups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsAsyncPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListEntryGroupsResponse]], + request: datacatalog.ListEntryGroupsRequest, + response: datacatalog.ListEntryGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListEntryGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.ListEntryGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datacatalog.EntryGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListEntriesResponse], + request: datacatalog.ListEntriesRequest, + response: datacatalog.ListEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datacatalog.Entry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesAsyncPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. 
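+
+    A minimal async sketch, assuming ``client`` is the corresponding
+    ``DataCatalogAsyncClient``, the code runs inside an event loop, and the
+    parent entry group name is an illustrative placeholder:
+
+    .. code-block:: python
+
+        # Await the call to get the pager, then iterate it asynchronously.
+        pager = await client.list_entries(
+            request={"parent": "projects/my-project/locations/us-central1/entryGroups/my_entry_group"}
+        )
+        async for entry in pager:
+            print(entry.name)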
+ + All the usual :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListEntriesResponse]], + request: datacatalog.ListEntriesRequest, + response: datacatalog.ListEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datacatalog.Entry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTagsPager: + """A pager for iterating through ``list_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTags`` requests and continue to iterate + through the ``tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListTagsResponse], + request: datacatalog.ListTagsRequest, + response: datacatalog.ListTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.ListTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tags.Tag]: + for page in self.pages: + yield from page.tags + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTagsAsyncPager: + """A pager for iterating through ``list_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTags`` requests and continue to iterate + through the ``tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListTagsResponse]], + request: datacatalog.ListTagsRequest, + response: datacatalog.ListTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[tags.Tag]: + async def async_generator(): + async for page in self.pages: + for response in page.tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py new file mode 100644 index 000000000000..8b4fbbf168be --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataCatalogTransport +from .grpc import DataCatalogGrpcTransport +from .grpc_asyncio import DataCatalogGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] +_transport_registry['grpc'] = DataCatalogGrpcTransport +_transport_registry['grpc_asyncio'] = DataCatalogGrpcAsyncIOTransport + +__all__ = ( + 'DataCatalogTransport', + 'DataCatalogGrpcTransport', + 'DataCatalogGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py new file mode 100644 index 000000000000..b78071be0649 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -0,0 +1,657 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import tags +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataCatalogTransport(abc.ABC): + """Abstract transport class for DataCatalog.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search_catalog: gapic_v1.method.wrap_method( + self.search_catalog, + default_timeout=None, + client_info=client_info, + ), + self.create_entry_group: gapic_v1.method.wrap_method( + self.create_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_group: gapic_v1.method.wrap_method( + self.get_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.update_entry_group: gapic_v1.method.wrap_method( + self.update_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_group: gapic_v1.method.wrap_method( + self.delete_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.list_entry_groups: gapic_v1.method.wrap_method( + self.list_entry_groups, + default_timeout=None, + client_info=client_info, + ), + self.create_entry: gapic_v1.method.wrap_method( + self.create_entry, + default_timeout=None, + client_info=client_info, + ), + self.update_entry: gapic_v1.method.wrap_method( + self.update_entry, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry: gapic_v1.method.wrap_method( + self.delete_entry, + default_timeout=None, + client_info=client_info, + ), + self.get_entry: gapic_v1.method.wrap_method( + self.get_entry, + default_timeout=None, + client_info=client_info, + ), + self.lookup_entry: gapic_v1.method.wrap_method( + self.lookup_entry, + default_timeout=None, + client_info=client_info, + ), + self.list_entries: gapic_v1.method.wrap_method( + self.list_entries, + default_timeout=None, + client_info=client_info, + ), + self.modify_entry_overview: gapic_v1.method.wrap_method( + self.modify_entry_overview, + default_timeout=None, + client_info=client_info, + ), + self.modify_entry_contacts: gapic_v1.method.wrap_method( + self.modify_entry_contacts, + default_timeout=None, + client_info=client_info, + ), + self.create_tag_template: gapic_v1.method.wrap_method( + self.create_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.get_tag_template: gapic_v1.method.wrap_method( + self.get_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.update_tag_template: gapic_v1.method.wrap_method( + self.update_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.delete_tag_template: gapic_v1.method.wrap_method( + self.delete_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.create_tag_template_field: gapic_v1.method.wrap_method( + self.create_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.update_tag_template_field: gapic_v1.method.wrap_method( + self.update_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.rename_tag_template_field: gapic_v1.method.wrap_method( + 
self.rename_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( + self.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=client_info, + ), + self.delete_tag_template_field: gapic_v1.method.wrap_method( + self.delete_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.create_tag: gapic_v1.method.wrap_method( + self.create_tag, + default_timeout=None, + client_info=client_info, + ), + self.update_tag: gapic_v1.method.wrap_method( + self.update_tag, + default_timeout=None, + client_info=client_info, + ), + self.delete_tag: gapic_v1.method.wrap_method( + self.delete_tag, + default_timeout=None, + client_info=client_info, + ), + self.list_tags: gapic_v1.method.wrap_method( + self.list_tags, + default_timeout=None, + client_info=client_info, + ), + self.reconcile_tags: gapic_v1.method.wrap_method( + self.reconcile_tags, + default_timeout=None, + client_info=client_info, + ), + self.star_entry: gapic_v1.method.wrap_method( + self.star_entry, + default_timeout=None, + client_info=client_info, + ), + self.unstar_entry: gapic_v1.method.wrap_method( + self.unstar_entry, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.import_entries: gapic_v1.method.wrap_method( + self.import_entries, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
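+
+        A sketch of the typical lifecycle, assuming the concrete
+        ``datacatalog_v1.DataCatalogClient`` and application default
+        credentials; the client's context manager calls this method on
+        its transport when the block exits::
+
+            from google.cloud import datacatalog_v1
+
+            with datacatalog_v1.DataCatalogClient() as client:
+                ...  # transport resources are released on exit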
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + Union[ + datacatalog.SearchCatalogResponse, + Awaitable[datacatalog.SearchCatalogResponse] + ]]: + raise NotImplementedError() + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + Union[ + datacatalog.ListEntryGroupsResponse, + Awaitable[datacatalog.ListEntryGroupsResponse] + ]]: + raise NotImplementedError() + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + Union[ + datacatalog.ListEntriesResponse, + Awaitable[datacatalog.ListEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def modify_entry_overview(self) -> Callable[ + [datacatalog.ModifyEntryOverviewRequest], + Union[ + datacatalog.EntryOverview, + Awaitable[datacatalog.EntryOverview] + ]]: + raise NotImplementedError() + + @property + def modify_entry_contacts(self) -> Callable[ + [datacatalog.ModifyEntryContactsRequest], + Union[ + datacatalog.Contacts, + Awaitable[datacatalog.Contacts] + ]]: + raise NotImplementedError() + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise NotImplementedError() + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise NotImplementedError() + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise 
NotImplementedError() + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + Union[ + tags.Tag, + Awaitable[tags.Tag] + ]]: + raise NotImplementedError() + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + Union[ + tags.Tag, + Awaitable[tags.Tag] + ]]: + raise NotImplementedError() + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + Union[ + datacatalog.ListTagsResponse, + Awaitable[datacatalog.ListTagsResponse] + ]]: + raise NotImplementedError() + + @property + def reconcile_tags(self) -> Callable[ + [datacatalog.ReconcileTagsRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def star_entry(self) -> Callable[ + [datacatalog.StarEntryRequest], + Union[ + datacatalog.StarEntryResponse, + Awaitable[datacatalog.StarEntryResponse] + ]]: + raise NotImplementedError() + + @property + def unstar_entry(self) -> Callable[ + [datacatalog.UnstarEntryRequest], + Union[ + datacatalog.UnstarEntryResponse, + Awaitable[datacatalog.UnstarEntryResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def import_entries(self) -> Callable[ + [datacatalog.ImportEntriesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + 
[operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataCatalogTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py new file mode 100644 index 000000000000..95e3e749eeac --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -0,0 +1,1469 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import tags +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO + + +class DataCatalogGrpcTransport(DataCatalogTransport): + """gRPC backend transport for DataCatalog. + + Data Catalog API service allows you to discover, understand, + and manage your data. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
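+
+    A minimal way to obtain this transport is through the client (a
+    sketch, assuming application default credentials are available in
+    the environment)::
+
+        from google.cloud import datacatalog_v1
+
+        client = datacatalog_v1.DataCatalogClient(transport="grpc")
+        grpc_transport = client.transport  # a DataCatalogGrpcTransport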
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
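+
+        A direct-construction sketch with an explicitly created channel
+        (assumes application default credentials; the hostname shown is
+        the public endpoint)::
+
+            channel = DataCatalogGrpcTransport.create_channel(
+                "datacatalog.googleapis.com",
+            )
+            transport = DataCatalogGrpcTransport(channel=channel)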
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + datacatalog.SearchCatalogResponse]: + r"""Return a callable for the search catalog method over gRPC. + + Searches Data Catalog for multiple resources like entries and + tags that match a query. + + This is a [Custom Method] + (https://cloud.google.com/apis/design/custom_methods) that + doesn't return all information on a resource, only its ID and + high level fields. To get more information, you can subsequently + call specific get methods. + + Note: Data Catalog search queries don't guarantee full recall. + Results that match your query might not be returned, even in + subsequent result pages. Additionally, returned (and not + returned) results can vary if you repeat search queries. + + For more information, see [Data Catalog search syntax] + (https://cloud.google.com/data-catalog/docs/how-to/search-reference). + + Returns: + Callable[[~.SearchCatalogRequest], + ~.SearchCatalogResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_catalog' not in self._stubs: + self._stubs['search_catalog'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/SearchCatalog', + request_serializer=datacatalog.SearchCatalogRequest.serialize, + response_deserializer=datacatalog.SearchCatalogResponse.deserialize, + ) + return self._stubs['search_catalog'] + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an entry group. + + An entry group contains logically related entries together with + `Cloud Identity and Access + Management `__ policies. These + policies specify users who can create, edit, and view entries + within entry groups. + + Data Catalog automatically creates entry groups with names that + start with the ``@`` symbol for the following resources: + + - BigQuery entries (``@bigquery``) + - Pub/Sub topics (``@pubsub``) + - Dataproc Metastore services + (``@dataproc_metastore_{SERVICE_NAME_HASH}``) + + You can create your own entry groups for Cloud Storage fileset + entries and custom entries together with the corresponding IAM + policies. 
User-created entry groups can't contain the ``@`` + symbol, it is reserved for automatically created groups. + + Entry groups, like entries, can be searched. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.CreateEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateEntryGroup', + request_serializer=datacatalog.CreateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['create_entry_group'] + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an entry group. + + Returns: + Callable[[~.GetEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetEntryGroup', + request_serializer=datacatalog.GetEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an entry group. + + You must enable the Data Catalog API in the project identified + by the ``entry_group.name`` parameter. For more information, see + `Data Catalog resource + project `__. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntryGroup', + request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an entry group. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. 
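+
+        A usage sketch with the returned callable (``transport`` is an
+        instance of this class; the entry group name is a hypothetical
+        placeholder)::
+
+            request = datacatalog.DeleteEntryGroupRequest(
+                name="projects/my-project/locations/us-central1/entryGroups/my_group",
+            )
+            transport.delete_entry_group(request)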
+ + Returns: + Callable[[~.DeleteEntryGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup', + request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + datacatalog.ListEntryGroupsResponse]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists entry groups. + + Returns: + Callable[[~.ListEntryGroupsRequest], + ~.ListEntryGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListEntryGroups', + request_serializer=datacatalog.ListEntryGroupsRequest.serialize, + response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the create entry method over gRPC. + + Creates an entry. + + You can create entries only with 'FILESET', 'CLUSTER', + 'DATA_STREAM', or custom types. Data Catalog automatically + creates entries with other types during metadata ingestion from + integrated systems. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + An entry group can have a maximum of 100,000 entries. + + Returns: + Callable[[~.CreateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateEntry', + request_serializer=datacatalog.CreateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the update entry method over gRPC. + + Updates an existing entry. + + You must enable the Data Catalog API in the project identified + by the ``entry.name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.UpdateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntry', + request_serializer=datacatalog.UpdateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an existing entry. + + You can delete only the entries created by the + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] + method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteEntryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry', + request_serializer=datacatalog.DeleteEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry'] + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the get entry method over gRPC. + + Gets an entry. + + Returns: + Callable[[~.GetEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetEntry', + request_serializer=datacatalog.GetEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the lookup entry method over gRPC. + + Gets an entry by its target resource name. + + The resource name comes from the source Google Cloud + Platform service. + + Returns: + Callable[[~.LookupEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/LookupEntry', + request_serializer=datacatalog.LookupEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + datacatalog.ListEntriesResponse]: + r"""Return a callable for the list entries method over gRPC. + + Lists entries. 
+ + Note: Currently, this method can list only custom entries. To + get a list of both custom and automatically created entries, use + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Returns: + Callable[[~.ListEntriesRequest], + ~.ListEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListEntries', + request_serializer=datacatalog.ListEntriesRequest.serialize, + response_deserializer=datacatalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def modify_entry_overview(self) -> Callable[ + [datacatalog.ModifyEntryOverviewRequest], + datacatalog.EntryOverview]: + r"""Return a callable for the modify entry overview method over gRPC. + + Modifies entry overview, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateOverview`` IAM permission on the + corresponding project. + + Returns: + Callable[[~.ModifyEntryOverviewRequest], + ~.EntryOverview]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_entry_overview' not in self._stubs: + self._stubs['modify_entry_overview'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryOverview', + request_serializer=datacatalog.ModifyEntryOverviewRequest.serialize, + response_deserializer=datacatalog.EntryOverview.deserialize, + ) + return self._stubs['modify_entry_overview'] + + @property + def modify_entry_contacts(self) -> Callable[ + [datacatalog.ModifyEntryContactsRequest], + datacatalog.Contacts]: + r"""Return a callable for the modify entry contacts method over gRPC. + + Modifies contacts, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateContacts`` IAM permission on the + corresponding project. + + Returns: + Callable[[~.ModifyEntryContactsRequest], + ~.Contacts]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_entry_contacts' not in self._stubs: + self._stubs['modify_entry_contacts'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryContacts', + request_serializer=datacatalog.ModifyEntryContactsRequest.serialize, + response_deserializer=datacatalog.Contacts.deserialize, + ) + return self._stubs['modify_entry_contacts'] + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the create tag template method over gRPC. + + Creates a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. 
For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + Returns: + Callable[[~.CreateTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template' not in self._stubs: + self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplate', + request_serializer=datacatalog.CreateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['create_tag_template'] + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the get tag template method over gRPC. + + Gets a tag template. + + Returns: + Callable[[~.GetTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_tag_template' not in self._stubs: + self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetTagTemplate', + request_serializer=datacatalog.GetTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['get_tag_template'] + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the update tag template method over gRPC. + + Updates a tag template. + + You can't update template fields with this method. These fields + are separate resources with their own create, update, and delete + methods. + + You must enable the Data Catalog API in the project identified + by the ``tag_template.name`` parameter. For more information, + see `Data Catalog resource + project `__. + + Returns: + Callable[[~.UpdateTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template' not in self._stubs: + self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplate', + request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['update_tag_template'] + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag template method over gRPC. + + Deletes a tag template and all tags that use it. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteTagTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
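+
+        A usage sketch with the returned callable (``transport`` is an
+        instance of this class; the template name is a hypothetical
+        placeholder)::
+
+            request = datacatalog.DeleteTagTemplateRequest(
+                name="projects/my-project/locations/us-central1/tagTemplates/my_template",
+                force=True,
+            )
+            transport.delete_tag_template(request)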
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag_template' not in self._stubs: + self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate', + request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template'] + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the create tag template field method over gRPC. + + Creates a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.CreateTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template_field' not in self._stubs: + self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplateField', + request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['create_tag_template_field'] + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the update tag template field method over gRPC. + + Updates a field in a tag template. + + You can't update the field type with this method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.UpdateTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template_field' not in self._stubs: + self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplateField', + request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['update_tag_template_field'] + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the rename tag template field method over gRPC. + + Renames a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). 
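+
+        A usage sketch with the returned callable (``transport`` is an
+        instance of this class; both resource names are hypothetical
+        placeholders)::
+
+            request = datacatalog.RenameTagTemplateFieldRequest(
+                name="projects/my-project/locations/us-central1/tagTemplates/my_template/fields/old_field",
+                new_tag_template_field_id="new_field",
+            )
+            transport.rename_tag_template_field(request)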
+ + Returns: + Callable[[~.RenameTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field' not in self._stubs: + self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateField', + request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field'] + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + tags.TagTemplateField]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. + + Within a single enum field, enum values must be unique. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field_enum_value' not in self._stubs: + self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue', + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field_enum_value'] + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag template field method over gRPC. + + Deletes a field in a tag template and all uses of this field + from the tags based on this template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteTagTemplateFieldRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag_template_field' not in self._stubs: + self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField', + request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template_field'] + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + tags.Tag]: + r"""Return a callable for the create tag method over gRPC. + + Creates a tag and assigns it to: + + - An [Entry][google.cloud.datacatalog.v1.Entry] if the method + name is + ``projects.locations.entryGroups.entries.tags.create``. 
+ - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the + method name is + ``projects.locations.entryGroups.tags.create``. + + Note: The project identified by the ``parent`` parameter for the + [tag] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) + and the [tag template] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) + used to create the tag must be in the same organization. + + Returns: + Callable[[~.CreateTagRequest], + ~.Tag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag' not in self._stubs: + self._stubs['create_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTag', + request_serializer=datacatalog.CreateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['create_tag'] + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + tags.Tag]: + r"""Return a callable for the update tag method over gRPC. + + Updates an existing tag. + + Returns: + Callable[[~.UpdateTagRequest], + ~.Tag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag' not in self._stubs: + self._stubs['update_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTag', + request_serializer=datacatalog.UpdateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['update_tag'] + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag method over gRPC. + + Deletes a tag. + + Returns: + Callable[[~.DeleteTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag' not in self._stubs: + self._stubs['delete_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTag', + request_serializer=datacatalog.DeleteTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag'] + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + datacatalog.ListTagsResponse]: + r"""Return a callable for the list tags method over gRPC. + + Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1.Entry]. The + [columns][google.cloud.datacatalog.v1.Tag.column] in the + response are lowercased. + + Returns: + Callable[[~.ListTagsRequest], + ~.ListTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
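+        # Illustrative only (hypothetical names): callers holding this
+        # transport can invoke the cached stub directly, e.g.
+        #     transport.list_tags(datacatalog.ListTagsRequest(parent=entry_name))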
+ if 'list_tags' not in self._stubs: + self._stubs['list_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListTags', + request_serializer=datacatalog.ListTagsRequest.serialize, + response_deserializer=datacatalog.ListTagsResponse.deserialize, + ) + return self._stubs['list_tags'] + + @property + def reconcile_tags(self) -> Callable[ + [datacatalog.ReconcileTagsRequest], + operations_pb2.Operation]: + r"""Return a callable for the reconcile tags method over gRPC. + + ``ReconcileTags`` creates or updates a list of tags on the + entry. If the + [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] + parameter is set, the operation deletes tags not included in the + input tag list. + + ``ReconcileTags`` returns a [long-running operation] + [google.longrunning.Operation] resource that can be queried with + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + to return [ReconcileTagsMetadata] + [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a + [ReconcileTagsResponse] + [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. + + Returns: + Callable[[~.ReconcileTagsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reconcile_tags' not in self._stubs: + self._stubs['reconcile_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ReconcileTags', + request_serializer=datacatalog.ReconcileTagsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reconcile_tags'] + + @property + def star_entry(self) -> Callable[ + [datacatalog.StarEntryRequest], + datacatalog.StarEntryResponse]: + r"""Return a callable for the star entry method over gRPC. + + Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred + by the current user. Starring information is private to each + user. + + Returns: + Callable[[~.StarEntryRequest], + ~.StarEntryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'star_entry' not in self._stubs: + self._stubs['star_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/StarEntry', + request_serializer=datacatalog.StarEntryRequest.serialize, + response_deserializer=datacatalog.StarEntryResponse.deserialize, + ) + return self._stubs['star_entry'] + + @property + def unstar_entry(self) -> Callable[ + [datacatalog.UnstarEntryRequest], + datacatalog.UnstarEntryResponse]: + r"""Return a callable for the unstar entry method over gRPC. + + Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT + starred by the current user. Starring information is private to + each user. + + Returns: + Callable[[~.UnstarEntryRequest], + ~.UnstarEntryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
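# A hedged sketch of how ReconcileTags (described above) is typically driven through
# the generated DataCatalogClient rather than this transport directly. Resource names
# are placeholders; request field names other than force_delete_missing are assumptions
# based on the ReconcileTagsRequest type.
from google.cloud import datacatalog_v1

def reconcile_entry_tags(entry_name, template_name, desired_tags):
    client = datacatalog_v1.DataCatalogClient()
    request = datacatalog_v1.ReconcileTagsRequest(
        parent=entry_name,             # entry whose tags are reconciled
        tag_template=template_name,    # only tags from this template are touched
        force_delete_missing=True,     # delete tags missing from desired_tags
        tags=desired_tags,             # list of datacatalog_v1.Tag
    )
    operation = client.reconcile_tags(request=request)   # long-running operation
    return operation.result()                            # ReconcileTagsResponse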
+ if 'unstar_entry' not in self._stubs: + self._stubs['unstar_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UnstarEntry', + request_serializer=datacatalog.UnstarEntryRequest.serialize, + response_deserializer=datacatalog.UnstarEntryResponse.deserialize, + ) + return self._stubs['unstar_entry'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method sets policies only within Data Catalog and + can't be used to manage policies in BigQuery, Pub/Sub, Dataproc + Metastore, and any external Google Cloud Platform resources + synced with the Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. + + May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method doesn't get policies from Google Cloud + Platform resources ingested into Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. 
+
+ Gets your permissions on a resource.
+
+ Returns an empty set of permissions if the resource
+ doesn't exist.
+
+ Supported resources are:
+
+ - Tag templates
+ - Entry groups
+
+ Note: This method gets policies only within Data Catalog
+ and can't be used to get policies from BigQuery,
+ Pub/Sub, Dataproc Metastore, and any external Google
+ Cloud Platform resources ingested into Data Catalog.
+
+ No Google IAM permissions are required to call this
+ method.
+
+ Returns:
+ Callable[[~.TestIamPermissionsRequest],
+ ~.TestIamPermissionsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'test_iam_permissions' not in self._stubs:
+ self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
+ '/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions',
+ request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+ )
+ return self._stubs['test_iam_permissions']
+
+ @property
+ def import_entries(self) -> Callable[
+ [datacatalog.ImportEntriesRequest],
+ operations_pb2.Operation]:
+ r"""Return a callable for the import entries method over gRPC.
+
+ Imports entries from a source, such as data previously dumped
+ into a Cloud Storage bucket, into Data Catalog. Import of
+ entries is a sync operation that reconciles the state of the
+ third-party system with the Data Catalog.
+
+ ``ImportEntries`` accepts source data snapshots of a third-party
+ system. The snapshot should be delivered as a .wire or
+ base64-encoded .txt file containing a sequence of Protocol
+ Buffer messages of
+ [DumpItem][google.cloud.datacatalog.v1.DumpItem] type.
+
+ ``ImportEntries`` returns a [long-running operation]
+ [google.longrunning.Operation] resource that can be queried with
+ [Operations.GetOperation][google.longrunning.Operations.GetOperation]
+ to return
+ [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata]
+ and an
+ [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse]
+ message.
+
+ Returns:
+ Callable[[~.ImportEntriesRequest],
+ ~.Operation]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'import_entries' not in self._stubs:
+ self._stubs['import_entries'] = self.grpc_channel.unary_unary(
+ '/google.cloud.datacatalog.v1.DataCatalog/ImportEntries',
+ request_serializer=datacatalog.ImportEntriesRequest.serialize,
+ response_deserializer=operations_pb2.Operation.FromString,
+ )
+ return self._stubs['import_entries']
+
+ def close(self):
+ self.grpc_channel.close()
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+ r"""Return a callable for the delete_operation method over gRPC.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
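# A sketch of the TestIamPermissions stub above, exercised through the generated
# client. The IAM request/response types come from google.iam.v1, which is why the
# stub uses SerializeToString/FromString instead of proto-plus serialize/deserialize.
# The resource name and permission strings are placeholders.
from google.cloud import datacatalog_v1
from google.iam.v1 import iam_policy_pb2

def check_template_permissions(template_name):
    client = datacatalog_v1.DataCatalogClient()
    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource=template_name,
        permissions=["datacatalog.tagTemplates.getTag", "datacatalog.tagTemplates.use"],
    )
    # No IAM permissions are required to call this method (see the docstring above).
    response = client.test_iam_permissions(request=request)
    return list(response.permissions)   # the subset of requested permissions you hold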
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataCatalogGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py new file mode 100644 index 000000000000..83b1800e7811 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -0,0 +1,1468 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import operations_v1
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.datacatalog_v1.types import datacatalog
+from google.cloud.datacatalog_v1.types import tags
+from google.iam.v1 import iam_policy_pb2 # type: ignore
+from google.iam.v1 import policy_pb2 # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO
+from .grpc import DataCatalogGrpcTransport
+
+
+class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport):
+ """gRPC AsyncIO backend transport for DataCatalog.
+
+ Data Catalog API service allows you to discover, understand,
+ and manage your data.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'datacatalog.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + Awaitable[datacatalog.SearchCatalogResponse]]: + r"""Return a callable for the search catalog method over gRPC. + + Searches Data Catalog for multiple resources like entries and + tags that match a query. + + This is a [Custom Method] + (https://cloud.google.com/apis/design/custom_methods) that + doesn't return all information on a resource, only its ID and + high level fields. To get more information, you can subsequently + call specific get methods. 
+ + Note: Data Catalog search queries don't guarantee full recall. + Results that match your query might not be returned, even in + subsequent result pages. Additionally, returned (and not + returned) results can vary if you repeat search queries. + + For more information, see [Data Catalog search syntax] + (https://cloud.google.com/data-catalog/docs/how-to/search-reference). + + Returns: + Callable[[~.SearchCatalogRequest], + Awaitable[~.SearchCatalogResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_catalog' not in self._stubs: + self._stubs['search_catalog'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/SearchCatalog', + request_serializer=datacatalog.SearchCatalogRequest.serialize, + response_deserializer=datacatalog.SearchCatalogResponse.deserialize, + ) + return self._stubs['search_catalog'] + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the create entry group method over gRPC. + + Creates an entry group. + + An entry group contains logically related entries together with + `Cloud Identity and Access + Management `__ policies. These + policies specify users who can create, edit, and view entries + within entry groups. + + Data Catalog automatically creates entry groups with names that + start with the ``@`` symbol for the following resources: + + - BigQuery entries (``@bigquery``) + - Pub/Sub topics (``@pubsub``) + - Dataproc Metastore services + (``@dataproc_metastore_{SERVICE_NAME_HASH}``) + + You can create your own entry groups for Cloud Storage fileset + entries and custom entries together with the corresponding IAM + policies. User-created entry groups can't contain the ``@`` + symbol, it is reserved for automatically created groups. + + Entry groups, like entries, can be searched. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.CreateEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateEntryGroup', + request_serializer=datacatalog.CreateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['create_entry_group'] + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an entry group. + + Returns: + Callable[[~.GetEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
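# A sketch of the async surface these stubs back. DataCatalogAsyncClient.search_catalog
# awaits the RPC and returns an async pager, so results are consumed with `async for`.
# The project ID and query are placeholders.
import asyncio
from google.cloud import datacatalog_v1

async def search_tables():
    client = datacatalog_v1.DataCatalogAsyncClient()
    scope = datacatalog_v1.SearchCatalogRequest.Scope(include_project_ids=["my-project"])
    pager = await client.search_catalog(scope=scope, query="type=table name:orders")
    async for result in pager:          # pages are fetched lazily while iterating
        print(result.relative_resource_name)

# asyncio.run(search_tables())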
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetEntryGroup', + request_serializer=datacatalog.GetEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an entry group. + + You must enable the Data Catalog API in the project identified + by the ``entry_group.name`` parameter. For more information, see + `Data Catalog resource + project `__. + + Returns: + Callable[[~.UpdateEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntryGroup', + request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['update_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an entry group. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteEntryGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup', + request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + Awaitable[datacatalog.ListEntryGroupsResponse]]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists entry groups. + + Returns: + Callable[[~.ListEntryGroupsRequest], + Awaitable[~.ListEntryGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListEntryGroups', + request_serializer=datacatalog.ListEntryGroupsRequest.serialize, + response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the create entry method over gRPC. + + Creates an entry. + + You can create entries only with 'FILESET', 'CLUSTER', + 'DATA_STREAM', or custom types. Data Catalog automatically + creates entries with other types during metadata ingestion from + integrated systems. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + An entry group can have a maximum of 100,000 entries. + + Returns: + Callable[[~.CreateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateEntry', + request_serializer=datacatalog.CreateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the update entry method over gRPC. + + Updates an existing entry. + + You must enable the Data Catalog API in the project identified + by the ``entry.name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.UpdateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntry', + request_serializer=datacatalog.UpdateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an existing entry. + + You can delete only the entries created by the + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] + method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteEntryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
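# A sketch of creating a custom entry with the async client (the CreateEntry stub
# above). Only FILESET, CLUSTER, DATA_STREAM, or custom-typed entries can be created
# directly; the entry group name and identifiers are placeholders.
from google.cloud import datacatalog_v1

async def create_custom_entry(entry_group_name):
    client = datacatalog_v1.DataCatalogAsyncClient()

    entry = datacatalog_v1.Entry()
    entry.display_name = "On-prem orders table"
    entry.user_specified_system = "onprem_warehouse"   # custom source system
    entry.user_specified_type = "onprem_table"         # custom entry type
    entry.linked_resource = "//my-onprem-server.example.com/warehouse/orders"

    return await client.create_entry(
        parent=entry_group_name, entry_id="onprem_orders", entry=entry)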
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry', + request_serializer=datacatalog.DeleteEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry'] + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the get entry method over gRPC. + + Gets an entry. + + Returns: + Callable[[~.GetEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetEntry', + request_serializer=datacatalog.GetEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the lookup entry method over gRPC. + + Gets an entry by its target resource name. + + The resource name comes from the source Google Cloud + Platform service. + + Returns: + Callable[[~.LookupEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/LookupEntry', + request_serializer=datacatalog.LookupEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + Awaitable[datacatalog.ListEntriesResponse]]: + r"""Return a callable for the list entries method over gRPC. + + Lists entries. + + Note: Currently, this method can list only custom entries. To + get a list of both custom and automatically created entries, use + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Returns: + Callable[[~.ListEntriesRequest], + Awaitable[~.ListEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
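# A sketch of LookupEntry (stub above): the entry is resolved from the source system's
# own resource name rather than a Data Catalog name, so the request carries no parent.
# The BigQuery table reference is a placeholder.
from google.cloud import datacatalog_v1

async def lookup_bigquery_table():
    client = datacatalog_v1.DataCatalogAsyncClient()
    request = datacatalog_v1.LookupEntryRequest(
        linked_resource=(
            "//bigquery.googleapis.com/projects/my-project"
            "/datasets/sales/tables/orders"
        )
    )
    return await client.lookup_entry(request=request)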
+ if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListEntries', + request_serializer=datacatalog.ListEntriesRequest.serialize, + response_deserializer=datacatalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def modify_entry_overview(self) -> Callable[ + [datacatalog.ModifyEntryOverviewRequest], + Awaitable[datacatalog.EntryOverview]]: + r"""Return a callable for the modify entry overview method over gRPC. + + Modifies entry overview, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateOverview`` IAM permission on the + corresponding project. + + Returns: + Callable[[~.ModifyEntryOverviewRequest], + Awaitable[~.EntryOverview]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_entry_overview' not in self._stubs: + self._stubs['modify_entry_overview'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryOverview', + request_serializer=datacatalog.ModifyEntryOverviewRequest.serialize, + response_deserializer=datacatalog.EntryOverview.deserialize, + ) + return self._stubs['modify_entry_overview'] + + @property + def modify_entry_contacts(self) -> Callable[ + [datacatalog.ModifyEntryContactsRequest], + Awaitable[datacatalog.Contacts]]: + r"""Return a callable for the modify entry contacts method over gRPC. + + Modifies contacts, part of the business context of an + [Entry][google.cloud.datacatalog.v1.Entry]. + + To call this method, you must have the + ``datacatalog.entries.updateContacts`` IAM permission on the + corresponding project. + + Returns: + Callable[[~.ModifyEntryContactsRequest], + Awaitable[~.Contacts]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_entry_contacts' not in self._stubs: + self._stubs['modify_entry_contacts'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryContacts', + request_serializer=datacatalog.ModifyEntryContactsRequest.serialize, + response_deserializer=datacatalog.Contacts.deserialize, + ) + return self._stubs['modify_entry_contacts'] + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the create tag template method over gRPC. + + Creates a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + Returns: + Callable[[~.CreateTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_tag_template' not in self._stubs: + self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplate', + request_serializer=datacatalog.CreateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['create_tag_template'] + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the get tag template method over gRPC. + + Gets a tag template. + + Returns: + Callable[[~.GetTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_tag_template' not in self._stubs: + self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/GetTagTemplate', + request_serializer=datacatalog.GetTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['get_tag_template'] + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the update tag template method over gRPC. + + Updates a tag template. + + You can't update template fields with this method. These fields + are separate resources with their own create, update, and delete + methods. + + You must enable the Data Catalog API in the project identified + by the ``tag_template.name`` parameter. For more information, + see `Data Catalog resource + project `__. + + Returns: + Callable[[~.UpdateTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template' not in self._stubs: + self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplate', + request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['update_tag_template'] + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag template method over gRPC. + + Deletes a tag template and all tags that use it. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteTagTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
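# A sketch of CreateTagTemplate (stub above) via the async client. The location and
# field IDs are placeholders; note that the proto field `type` is exposed as `type_`
# on the generated Python types.
from google.cloud import datacatalog_v1

async def create_template(project_id):
    client = datacatalog_v1.DataCatalogAsyncClient()

    template = datacatalog_v1.TagTemplate()
    template.display_name = "Data governance"
    template.fields["owner"] = datacatalog_v1.TagTemplateField()
    template.fields["owner"].display_name = "Data owner"
    template.fields["owner"].type_.primitive_type = (
        datacatalog_v1.FieldType.PrimitiveType.STRING)

    return await client.create_tag_template(
        parent=f"projects/{project_id}/locations/us-central1",
        tag_template_id="data_governance",
        tag_template=template,
    )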
+ if 'delete_tag_template' not in self._stubs: + self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate', + request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template'] + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the create tag template field method over gRPC. + + Creates a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``parent`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.CreateTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template_field' not in self._stubs: + self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplateField', + request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['create_tag_template_field'] + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the update tag template field method over gRPC. + + Updates a field in a tag template. + + You can't update the field type with this method. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.UpdateTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template_field' not in self._stubs: + self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplateField', + request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['update_tag_template_field'] + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the rename tag template field method over gRPC. + + Renames a field in a tag template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see [Data + Catalog resource project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project). + + Returns: + Callable[[~.RenameTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
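# A sketch of adding and then renaming a field on an existing template (the create and
# rename stubs above). The template name is a placeholder; the rename takes the new
# field ID rather than a full resource name.
from google.cloud import datacatalog_v1

async def add_and_rename_field(template_name):
    client = datacatalog_v1.DataCatalogAsyncClient()

    field = datacatalog_v1.TagTemplateField()
    field.display_name = "Retention policy"
    field.type_.primitive_type = datacatalog_v1.FieldType.PrimitiveType.STRING

    created = await client.create_tag_template_field(
        parent=template_name,
        tag_template_field_id="retention",
        tag_template_field=field,
    )

    # RenameTagTemplateField keeps the field and its data but changes its ID.
    return await client.rename_tag_template_field(
        name=created.name, new_tag_template_field_id="retention_policy")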
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field' not in self._stubs: + self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateField', + request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field'] + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. + + Within a single enum field, enum values must be unique. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field_enum_value' not in self._stubs: + self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue', + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field_enum_value'] + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag template field method over gRPC. + + Deletes a field in a tag template and all uses of this field + from the tags based on this template. + + You must enable the Data Catalog API in the project identified + by the ``name`` parameter. For more information, see `Data + Catalog resource + project `__. + + Returns: + Callable[[~.DeleteTagTemplateFieldRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag_template_field' not in self._stubs: + self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField', + request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template_field'] + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + Awaitable[tags.Tag]]: + r"""Return a callable for the create tag method over gRPC. + + Creates a tag and assigns it to: + + - An [Entry][google.cloud.datacatalog.v1.Entry] if the method + name is + ``projects.locations.entryGroups.entries.tags.create``. + - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the + method name is + ``projects.locations.entryGroups.tags.create``. 
+ + Note: The project identified by the ``parent`` parameter for the + [tag] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) + and the [tag template] + (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) + used to create the tag must be in the same organization. + + Returns: + Callable[[~.CreateTagRequest], + Awaitable[~.Tag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag' not in self._stubs: + self._stubs['create_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/CreateTag', + request_serializer=datacatalog.CreateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['create_tag'] + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + Awaitable[tags.Tag]]: + r"""Return a callable for the update tag method over gRPC. + + Updates an existing tag. + + Returns: + Callable[[~.UpdateTagRequest], + Awaitable[~.Tag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag' not in self._stubs: + self._stubs['update_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UpdateTag', + request_serializer=datacatalog.UpdateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['update_tag'] + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag method over gRPC. + + Deletes a tag. + + Returns: + Callable[[~.DeleteTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag' not in self._stubs: + self._stubs['delete_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/DeleteTag', + request_serializer=datacatalog.DeleteTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag'] + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + Awaitable[datacatalog.ListTagsResponse]]: + r"""Return a callable for the list tags method over gRPC. + + Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1.Entry]. The + [columns][google.cloud.datacatalog.v1.Tag.column] in the + response are lowercased. + + Returns: + Callable[[~.ListTagsRequest], + Awaitable[~.ListTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_tags' not in self._stubs: + self._stubs['list_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ListTags', + request_serializer=datacatalog.ListTagsRequest.serialize, + response_deserializer=datacatalog.ListTagsResponse.deserialize, + ) + return self._stubs['list_tags'] + + @property + def reconcile_tags(self) -> Callable[ + [datacatalog.ReconcileTagsRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the reconcile tags method over gRPC. + + ``ReconcileTags`` creates or updates a list of tags on the + entry. If the + [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] + parameter is set, the operation deletes tags not included in the + input tag list. + + ``ReconcileTags`` returns a [long-running operation] + [google.longrunning.Operation] resource that can be queried with + [Operations.GetOperation][google.longrunning.Operations.GetOperation] + to return [ReconcileTagsMetadata] + [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a + [ReconcileTagsResponse] + [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. + + Returns: + Callable[[~.ReconcileTagsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reconcile_tags' not in self._stubs: + self._stubs['reconcile_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ReconcileTags', + request_serializer=datacatalog.ReconcileTagsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reconcile_tags'] + + @property + def star_entry(self) -> Callable[ + [datacatalog.StarEntryRequest], + Awaitable[datacatalog.StarEntryResponse]]: + r"""Return a callable for the star entry method over gRPC. + + Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred + by the current user. Starring information is private to each + user. + + Returns: + Callable[[~.StarEntryRequest], + Awaitable[~.StarEntryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'star_entry' not in self._stubs: + self._stubs['star_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/StarEntry', + request_serializer=datacatalog.StarEntryRequest.serialize, + response_deserializer=datacatalog.StarEntryResponse.deserialize, + ) + return self._stubs['star_entry'] + + @property + def unstar_entry(self) -> Callable[ + [datacatalog.UnstarEntryRequest], + Awaitable[datacatalog.UnstarEntryResponse]]: + r"""Return a callable for the unstar entry method over gRPC. + + Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT + starred by the current user. Starring information is private to + each user. + + Returns: + Callable[[~.UnstarEntryRequest], + Awaitable[~.UnstarEntryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
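# A sketch of the starring and tag-listing stubs above through the async client.
# Starring is per-user metadata and does not modify the entry itself; identifiers
# are placeholders.
from google.cloud import datacatalog_v1

async def star_and_list_tags():
    client = datacatalog_v1.DataCatalogAsyncClient()
    entry_name = client.entry_path(
        "my-project", "us-central1", "my_entry_group", "my_entry")

    await client.star_entry(name=entry_name)

    pager = await client.list_tags(parent=entry_name)
    async for tag in pager:
        print(tag.template, list(tag.fields))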
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'unstar_entry' not in self._stubs: + self._stubs['unstar_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/UnstarEntry', + request_serializer=datacatalog.UnstarEntryRequest.serialize, + response_deserializer=datacatalog.UnstarEntryResponse.deserialize, + ) + return self._stubs['unstar_entry'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method sets policies only within Data Catalog and + can't be used to manage policies in BigQuery, Pub/Sub, Dataproc + Metastore, and any external Google Cloud Platform resources + synced with the Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. + + May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Tag templates + - Entry groups + + Note: This method doesn't get policies from Google Cloud + Platform resources ingested into Data Catalog. + + To call this method, you must have the following Google IAM + permissions: + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
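+ # A minimal sketch (illustrative note, not generated output) of reading the
+ # policy on a tag template through the async client, mirroring the IAM
+ # callables defined here; the resource name is a placeholder.
+ #
+ #   from google.iam.v1 import iam_policy_pb2
+ #
+ #   async def show_policy(client, template_name: str):
+ #       request = iam_policy_pb2.GetIamPolicyRequest(resource=template_name)
+ #       policy = await client.get_iam_policy(request=request)
+ #       for binding in policy.bindings:
+ #           print(binding.role, list(binding.members))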
+ if 'get_iam_policy' not in self._stubs:
+ self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary(
+ '/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy',
+ request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+ response_deserializer=policy_pb2.Policy.FromString,
+ )
+ return self._stubs['get_iam_policy']
+
+ @property
+ def test_iam_permissions(self) -> Callable[
+ [iam_policy_pb2.TestIamPermissionsRequest],
+ Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]:
+ r"""Return a callable for the test iam permissions method over gRPC.
+
+ Gets your permissions on a resource.
+
+ Returns an empty set of permissions if the resource
+ doesn't exist.
+
+ Supported resources are:
+
+ - Tag templates
+ - Entry groups
+
+ Note: This method gets policies only within Data Catalog
+ and can't be used to get policies from BigQuery,
+ Pub/Sub, Dataproc Metastore, and any external Google
+ Cloud Platform resources ingested into Data Catalog.
+
+ No Google IAM permissions are required to call this
+ method.
+
+ Returns:
+ Callable[[~.TestIamPermissionsRequest],
+ Awaitable[~.TestIamPermissionsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'test_iam_permissions' not in self._stubs:
+ self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary(
+ '/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions',
+ request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+ response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+ )
+ return self._stubs['test_iam_permissions']
+
+ @property
+ def import_entries(self) -> Callable[
+ [datacatalog.ImportEntriesRequest],
+ Awaitable[operations_pb2.Operation]]:
+ r"""Return a callable for the import entries method over gRPC.
+
+ Imports entries from a source, such as data previously dumped
+ into a Cloud Storage bucket, into Data Catalog. Import of
+ entries is a sync operation that reconciles the state of the
+ third-party system with the Data Catalog.
+
+ ``ImportEntries`` accepts source data snapshots of a third-party
+ system. A snapshot should be delivered as a .wire or
+ base64-encoded .txt file containing a sequence of Protocol
+ Buffer messages of
+ [DumpItem][google.cloud.datacatalog.v1.DumpItem] type.
+
+ ``ImportEntries`` returns a [long-running operation]
+ [google.longrunning.Operation] resource that can be queried with
+ [Operations.GetOperation][google.longrunning.Operations.GetOperation]
+ to return
+ [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata]
+ and an
+ [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse]
+ message.
+
+ Returns:
+ Callable[[~.ImportEntriesRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
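+ # A minimal sketch (illustrative note, not generated output) of starting an
+ # ImportEntries job from a Cloud Storage dump via the async client and
+ # waiting on the long-running operation; the parent and bucket path are
+ # placeholders, and `gcs_bucket_path` is assumed to be the request's
+ # source field.
+ #
+ #   from google.cloud import datacatalog_v1
+ #
+ #   async def import_dump(parent: str):
+ #       client = datacatalog_v1.DataCatalogAsyncClient()
+ #       request = datacatalog_v1.ImportEntriesRequest(
+ #           parent=parent,
+ #           gcs_bucket_path="gs://my-bucket/dump",
+ #       )
+ #       operation = await client.import_entries(request=request)
+ #       response = await operation.result()  # ImportEntriesResponse
+ #       print(response)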
+ if 'import_entries' not in self._stubs: + self._stubs['import_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.DataCatalog/ImportEntries', + request_serializer=datacatalog.ImportEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_entries'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
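+ # A minimal sketch (illustrative note, not generated output): the Operations
+ # callables above can be used to poll a long-running job directly; the
+ # operation name is a placeholder.
+ #
+ #   from google.longrunning import operations_pb2
+ #
+ #   async def check_operation(transport, operation_name: str):
+ #       request = operations_pb2.GetOperationRequest(name=operation_name)
+ #       op = await transport.get_operation(request)
+ #       print(op.done)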
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'DataCatalogGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py new file mode 100644 index 000000000000..cde5f3a05abb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PolicyTagManagerClient +from .async_client import PolicyTagManagerAsyncClient + +__all__ = ( + 'PolicyTagManagerClient', + 'PolicyTagManagerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py new file mode 100644 index 000000000000..8c2dfaf97b90 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py @@ -0,0 +1,1819 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport +from .client import PolicyTagManagerClient + + +class PolicyTagManagerAsyncClient: + """Policy Tag Manager API service allows you to manage your + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + """ + + _client: PolicyTagManagerClient + + DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT + + policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) + parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) + taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) + common_billing_account_path = staticmethod(PolicyTagManagerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(PolicyTagManagerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) + parse_common_folder_path = staticmethod(PolicyTagManagerClient.parse_common_folder_path) + common_organization_path = staticmethod(PolicyTagManagerClient.common_organization_path) + parse_common_organization_path = staticmethod(PolicyTagManagerClient.parse_common_organization_path) + common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) + parse_common_project_path = staticmethod(PolicyTagManagerClient.parse_common_project_path) + common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) + parse_common_location_path = staticmethod(PolicyTagManagerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. 
+ """ + return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. + """ + return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PolicyTagManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PolicyTagManagerTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PolicyTagManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_taxonomy(self, + request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. + + The taxonomy is initially empty, that is, it doesn't + contain policy tags. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTaxonomyRequest, dict]]): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (:class:`str`): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to create. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.CreateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_taxonomy(self, + request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy, including all policy tags in this + taxonomy, their associated policies, and the policy tags + references from BigQuery columns. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + await client.delete_taxonomy(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest, dict]]): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (:class:`str`): + Required. Resource name of the + taxonomy to delete. + Note: All policy tags in this taxonomy + are also deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.DeleteTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_taxonomy(self, + request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, + *, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy, including its display name, + description, and activated policy types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTaxonomyRequest( + ) + + # Make the request + response = await client.update_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest, dict]]): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): + The taxonomy to update. You can + update only its description, display + name, and activated policy types. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.UpdateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("taxonomy.name", request.taxonomy.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_taxonomies(self, + request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesAsyncPager: + r"""Lists all taxonomies in a project in a particular + location that you have a permission to view. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ListTaxonomiesRequest, dict]]): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (:class:`str`): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.ListTaxonomiesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTaxonomiesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_taxonomy(self, + request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.GetTaxonomyRequest, dict]]): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (:class:`str`): + Required. Resource name of the + taxonomy to get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.GetTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_policy_tag(self, + request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.CreatePolicyTagRequest, dict]]): + The request object. 
Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (:class:`str`): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to create. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.CreatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_policy_tag(self, + request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag together with the following: + + - All of its descendant policy tags, if any + - Policies associated with the policy tag and its descendants + - References from BigQuery table schema of the policy tag and + its descendants + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_policy_tag(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.DeletePolicyTagRequest, dict]]): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (:class:`str`): + Required. Resource name of the policy + tag to delete. + Note: All of its descendant policy tags + are also deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.DeletePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_policy_tag(self, + request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, + *, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag, including its display + name, description, and parent policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdatePolicyTagRequest( + ) + + # Make the request + response = await client.update_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest, dict]]): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): + The policy tag to update. You can + update only its description, display + name, and parent policy tag fields. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.UpdatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("policy_tag.name", request.policy_tag.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_policy_tags(self, + request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsAsyncPager: + r"""Lists all policy tags in a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ListPolicyTagsRequest, dict]]): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (:class:`str`): + Required. Resource name of the + taxonomy to list the policy tags of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.ListPolicyTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_policy_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPolicyTagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
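+ # A minimal sketch (illustrative note, not generated output) of consuming
+ # the pager returned below; the taxonomy name is a placeholder.
+ #
+ #   async def show_policy_tags(client, taxonomy_name: str):
+ #       pager = await client.list_policy_tags(parent=taxonomy_name)
+ #       async for policy_tag in pager:  # further pages resolve on demand
+ #           print(policy_tag.display_name)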
+ return response + + async def get_policy_tag(self, + request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = await client.get_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.GetPolicyTagRequest, dict]]): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (:class:`str`): + Required. Resource name of the policy + tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.GetPolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
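+        # ``response`` is a raw protobuf ``google.iam.v1.policy_pb2.Policy`` rather than
+        # a proto-plus wrapper, so its fields (for example ``response.bindings``) are
+        # accessed directly on the protobuf message.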
+ return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns your permissions on a specified policy tag or + taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
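+        # CancelOperation has no response payload and cancellation is best-effort;
+        # callers that need confirmation typically poll ``get_operation`` until the
+        # operation reaches a terminal state.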
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "PolicyTagManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py new file mode 100644 index 000000000000..a7efe8b14108 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -0,0 +1,2029 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +class PolicyTagManagerClientMeta(type): + """Metaclass for the PolicyTagManager client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[PolicyTagManagerTransport]]
+    _transport_registry["grpc"] = PolicyTagManagerGrpcTransport
+    _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[PolicyTagManagerTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta):
+    """Policy Tag Manager API service allows you to manage your
+    policy tags and taxonomies.
+
+    Policy tags are used to tag BigQuery columns and apply
+    additional access control policies. A taxonomy is a hierarchical
+    grouping of policy tags that classify data along a common axis.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PolicyTagManagerTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PolicyTagManagerTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def policy_tag_path(project: str,location: str,taxonomy: str,policy_tag: str,) -> str:
+        """Returns a fully-qualified policy_tag string."""
+        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, )
+
+    @staticmethod
+    def parse_policy_tag_path(path: str) -> Dict[str,str]:
+        """Parses a policy_tag path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)/policyTags/(?P<policy_tag>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def taxonomy_path(project: str,location: str,taxonomy: str,) -> str:
+        """Returns a fully-qualified taxonomy string."""
+        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, )
+
+    @staticmethod
+    def parse_taxonomy_path(path: str) -> Dict[str,str]:
+        """Parses a taxonomy path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, PolicyTagManagerTransport]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the policy tag manager client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, PolicyTagManagerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerTransport): + # transport is a PolicyTagManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_taxonomy(self, + request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in a specified project. 
+ + The taxonomy is initially empty, that is, it doesn't + contain policy tags. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreateTaxonomyRequest, dict]): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + parent (str): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to create. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreateTaxonomyRequest): + request = policytagmanager.CreateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. 
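+        # The routing header below is serialized into the ``x-goog-request-params``
+        # request metadata so the backend can route the call based on ``parent``.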
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_taxonomy(self, + request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy, including all policy tags in this + taxonomy, their associated policies, and the policy tags + references from BigQuery columns. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + client.delete_taxonomy(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest, dict]): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + name (str): + Required. Resource name of the + taxonomy to delete. + Note: All policy tags in this taxonomy + are also deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeleteTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): + request = policytagmanager.DeleteTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
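+        # DeleteTaxonomy returns ``google.protobuf.Empty``, so the RPC result is
+        # discarded and this method returns ``None``.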
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_taxonomy(self, + request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, + *, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy, including its display name, + description, and activated policy types. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTaxonomyRequest( + ) + + # Make the request + response = client.update_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest, dict]): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to update. You can + update only its description, display + name, and activated policy types. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): + request = policytagmanager.UpdateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
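+        # Only ``taxonomy`` is exposed as a flattened argument; to request a partial
+        # update, set ``update_mask`` on the ``UpdateTaxonomyRequest`` itself.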
+ if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("taxonomy.name", request.taxonomy.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_taxonomies(self, + request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesPager: + r"""Lists all taxonomies in a project in a particular + location that you have a permission to view. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ListTaxonomiesRequest, dict]): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + parent (str): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
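+        # A plain ``dict`` is also accepted for ``request``; the proto-plus constructor
+        # below coerces it into a ``ListTaxonomiesRequest``.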
+ if not isinstance(request, policytagmanager.ListTaxonomiesRequest): + request = policytagmanager.ListTaxonomiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTaxonomiesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_taxonomy(self, + request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.GetTaxonomyRequest, dict]): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + name (str): + Required. Resource name of the + taxonomy to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
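+        # ``name`` is a flattened convenience parameter; it is honored only when no full
+        # request object is passed, which the check below enforces.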
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetTaxonomyRequest): + request = policytagmanager.GetTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_policy_tag(self, + request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.CreatePolicyTagRequest, dict]): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + parent (str): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to create. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. 
+ + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreatePolicyTagRequest): + request = policytagmanager.CreatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_policy_tag(self, + request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag together with the following: + + - All of its descendant policy tags, if any + - Policies associated with the policy tag and its descendants + - References from BigQuery table schema of the policy tag and + its descendants + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + client.delete_policy_tag(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1.types.DeletePolicyTagRequest, dict]): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + name (str): + Required. Resource name of the policy + tag to delete. + Note: All of its descendant policy tags + are also deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeletePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeletePolicyTagRequest): + request = policytagmanager.DeletePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_policy_tag(self, + request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, + *, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag, including its display + name, description, and parent policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdatePolicyTagRequest( + ) + + # Make the request + response = client.update_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest, dict]): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. You can + update only its description, display + name, and parent policy tag fields. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): + request = policytagmanager.UpdatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("policy_tag.name", request.policy_tag.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_policy_tags(self, + request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsPager: + r"""Lists all policy tags in a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ListPolicyTagsRequest, dict]): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + parent (str): + Required. Resource name of the + taxonomy to list the policy tags of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListPolicyTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.ListPolicyTagsRequest): + request = policytagmanager.ListPolicyTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPolicyTagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_policy_tag(self, + request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = client.get_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.GetPolicyTagRequest, dict]): + The request object. 
Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + name (str): + Required. Resource name of the policy + tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.PolicyTag: + Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For + example: + + :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` + + Where the "Geolocation" policy tag contains three + children. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetPolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetPolicyTagRequest): + request = policytagmanager.GetPolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a policy tag or a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
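+            # With an empty request, ``request.resource`` defaults to the empty
+            # string, so the routing header added below carries no resource value.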
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a policy tag or a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns your permissions on a specified policy tag or + taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. 
Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PolicyTagManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
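+        # The RPC returns ``google.protobuf.Empty``; nothing is surfaced to the
+        # caller, so the result is intentionally discarded.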
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py new file mode 100644 index 000000000000..f5323f720e10 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.datacatalog_v1.types import policytagmanager + + +class ListTaxonomiesPager: + """A pager for iterating through ``list_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``taxonomies`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., policytagmanager.ListTaxonomiesResponse], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[policytagmanager.Taxonomy]: + for page in self.pages: + yield from page.taxonomies + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTaxonomiesAsyncPager: + """A pager for iterating through ``list_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``taxonomies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[policytagmanager.Taxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPolicyTagsPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., policytagmanager.ListPolicyTagsResponse], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[policytagmanager.PolicyTag]: + for page in self.pages: + yield from page.policy_tags + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPolicyTagsAsyncPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policy_tags`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[policytagmanager.PolicyTag]: + async def async_generator(): + async for page in self.pages: + for response in page.policy_tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py new file mode 100644 index 000000000000..192f3e97b958 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerTransport +from .grpc import PolicyTagManagerGrpcTransport +from .grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +# Compile a registry of transports. 
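+# The registry maps a transport name to its concrete class; for example,
+# ``_transport_registry['grpc']`` resolves to ``PolicyTagManagerGrpcTransport``.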
+_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] +_transport_registry['grpc'] = PolicyTagManagerGrpcTransport +_transport_registry['grpc_asyncio'] = PolicyTagManagerGrpcAsyncIOTransport + +__all__ = ( + 'PolicyTagManagerTransport', + 'PolicyTagManagerGrpcTransport', + 'PolicyTagManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py new file mode 100644 index 000000000000..7c841038bf6f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PolicyTagManagerTransport(abc.ABC): + """Abstract transport class for PolicyTagManager.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
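+        # Wrapping each RPC once here applies its default timeout/retry settings
+        # and the ``client_info`` user-agent metadata without re-wrapping per call.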
+ self._wrapped_methods = { + self.create_taxonomy: gapic_v1.method.wrap_method( + self.create_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_taxonomy: gapic_v1.method.wrap_method( + self.delete_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_taxonomy: gapic_v1.method.wrap_method( + self.update_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_taxonomies: gapic_v1.method.wrap_method( + self.list_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_taxonomy: gapic_v1.method.wrap_method( + self.get_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_policy_tag: gapic_v1.method.wrap_method( + self.create_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.delete_policy_tag: gapic_v1.method.wrap_method( + self.delete_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.update_policy_tag: gapic_v1.method.wrap_method( + self.update_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.list_policy_tags: gapic_v1.method.wrap_method( + self.list_policy_tags, + default_timeout=None, + client_info=client_info, + ), + self.get_policy_tag: gapic_v1.method.wrap_method( + self.get_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Union[ + policytagmanager.ListTaxonomiesResponse, + Awaitable[policytagmanager.ListTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Union[ + policytagmanager.ListPolicyTagsResponse, + Awaitable[policytagmanager.ListPolicyTagsResponse] + ]]: + raise NotImplementedError() + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() 
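+
+    # The concrete gRPC transports in ``grpc.py`` and ``grpc_asyncio.py`` override
+    # the properties above with callables bound to the generated service stubs.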
+ + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'PolicyTagManagerTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py new file mode 100644 index 000000000000..7e126ee7d7c2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py @@ -0,0 +1,671 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): + """gRPC backend transport for PolicyTagManager. + + Policy Tag Manager API service allows you to manage your + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. 
+ + The taxonomy is initially empty, that is, it doesn't + contain policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_taxonomy' not in self._stubs: + self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy', + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['create_taxonomy'] + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy, including all policy tags in this + taxonomy, their associated policies, and the policy tags + references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_taxonomy' not in self._stubs: + self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy', + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_taxonomy'] + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy, including its display name, + description, and activated policy types. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_taxonomy' not in self._stubs: + self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy', + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['update_taxonomy'] + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + policytagmanager.ListTaxonomiesResponse]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that you have a permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + ~.ListTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
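+ # The stub is cached in ``self._stubs``, so the channel callable is built on
+ # first access and reused by every later read of this property.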
+ if 'list_taxonomies' not in self._stubs: + self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies', + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs['list_taxonomies'] + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_taxonomy' not in self._stubs: + self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy', + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['get_taxonomy'] + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_policy_tag' not in self._stubs: + self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag', + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['create_policy_tag'] + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag together with the following: + + - All of its descendant policy tags, if any + - Policies associated with the policy tag and its descendants + - References from BigQuery table schema of the policy tag and + its descendants + + Returns: + Callable[[~.DeletePolicyTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_policy_tag' not in self._stubs: + self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag', + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_policy_tag'] + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the update policy tag method over gRPC. 
+ + Updates a policy tag, including its display + name, description, and parent policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_policy_tag' not in self._stubs: + self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag', + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['update_policy_tag'] + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + policytagmanager.ListPolicyTagsResponse]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. + + Returns: + Callable[[~.ListPolicyTagsRequest], + ~.ListPolicyTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_policy_tags' not in self._stubs: + self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags', + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs['list_policy_tags'] + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_policy_tag' not in self._stubs: + self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag', + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['get_policy_tag'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns your permissions on a specified policy tag or + taxonomy. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
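The IAM callables above mirror the standard IAM surface for taxonomies and policy tags. A minimal sketch of checking your own permissions through the client (the resource name and permission string are placeholders; the permissions worth testing depend on your setup):

.. code-block:: python

    from google.cloud import datacatalog_v1
    from google.iam.v1 import iam_policy_pb2

    client = datacatalog_v1.PolicyTagManagerClient()

    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/my-project/locations/us/taxonomies/12345",  # placeholder
        permissions=["datacatalog.taxonomies.get"],  # placeholder permission
    )
    response = client.test_iam_permissions(request=request)
    print(response.permissions)  # the subset of permissions you hold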
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'PolicyTagManagerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py new file mode 100644 index 000000000000..800006698a8e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -0,0 +1,670 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerGrpcTransport + + +class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): + """gRPC AsyncIO backend transport for PolicyTagManager. + + Policy Tag Manager API service allows you to manage your + policy tags and taxonomies. + + Policy tags are used to tag BigQuery columns and apply + additional access control policies. A taxonomy is a hierarchical + grouping of policy tags that classify data along a common axis. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in a specified project. + + The taxonomy is initially empty, that is, it doesn't + contain policy tags. + + Returns: + Callable[[~.CreateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
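+ # On the AsyncIO channel this callable returns an awaitable call object; the
+ # async client awaits it after adding retry and timeout handling.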
+ if 'create_taxonomy' not in self._stubs: + self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy', + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['create_taxonomy'] + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy, including all policy tags in this + taxonomy, their associated policies, and the policy tags + references from BigQuery columns. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_taxonomy' not in self._stubs: + self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy', + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_taxonomy'] + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy, including its display name, + description, and activated policy types. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_taxonomy' not in self._stubs: + self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy', + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['update_taxonomy'] + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Awaitable[policytagmanager.ListTaxonomiesResponse]]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that you have a permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + Awaitable[~.ListTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_taxonomies' not in self._stubs: + self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies', + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs['list_taxonomies'] + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_taxonomy' not in self._stubs: + self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy', + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['get_taxonomy'] + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in a taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_policy_tag' not in self._stubs: + self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag', + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['create_policy_tag'] + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag together with the following: + + - All of its descendant policy tags, if any + - Policies associated with the policy tag and its descendants + - References from BigQuery table schema of the policy tag and + its descendants + + Returns: + Callable[[~.DeletePolicyTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_policy_tag' not in self._stubs: + self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag', + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_policy_tag'] + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag, including its display + name, description, and parent policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_policy_tag' not in self._stubs: + self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag', + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['update_policy_tag'] + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Awaitable[policytagmanager.ListPolicyTagsResponse]]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. + + Returns: + Callable[[~.ListPolicyTagsRequest], + Awaitable[~.ListPolicyTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_policy_tags' not in self._stubs: + self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags', + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs['list_policy_tags'] + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_policy_tag' not in self._stubs: + self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag', + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['get_policy_tag'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a policy tag or a taxonomy. 
+ + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a policy tag or a taxonomy. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns your permissions on a specified policy tag or + taxonomy. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'PolicyTagManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py new file mode 100644 index 000000000000..0592b8ffb549 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PolicyTagManagerSerializationClient +from .async_client import PolicyTagManagerSerializationAsyncClient + +__all__ = ( + 'PolicyTagManagerSerializationClient', + 'PolicyTagManagerSerializationAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py new file mode 100644 index 000000000000..c610f7072457 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py @@ -0,0 +1,699 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport +from .client import PolicyTagManagerSerializationClient + + +class PolicyTagManagerSerializationAsyncClient: + """Policy Tag Manager Serialization API service allows you to + manipulate your policy tags and taxonomies in a serialized + format. + + Taxonomy is a hierarchical group of policy tags. 
+ """ + + _client: PolicyTagManagerSerializationClient + + DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT + + taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.parse_taxonomy_path) + common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_billing_account_path) + common_folder_path = staticmethod(PolicyTagManagerSerializationClient.common_folder_path) + parse_common_folder_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_folder_path) + common_organization_path = staticmethod(PolicyTagManagerSerializationClient.common_organization_path) + parse_common_organization_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_organization_path) + common_project_path = staticmethod(PolicyTagManagerSerializationClient.common_project_path) + parse_common_project_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_project_path) + common_location_path = staticmethod(PolicyTagManagerSerializationClient.common_location_path) + parse_common_location_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. + """ + return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationAsyncClient: The constructed client. + """ + return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+ default mTLS endpoint; if the environment variable is "never", use the default API
+ endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+ use the default API endpoint.
+
+ More details can be found at https://google.aip.dev/auth/4114.
+
+ Args:
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. Only the `api_endpoint` and `client_cert_source` properties may be used
+ in this method.
+
+ Returns:
+ Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+ client cert source to use.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+ """
+ return PolicyTagManagerSerializationClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
+
+ @property
+ def transport(self) -> PolicyTagManagerSerializationTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ PolicyTagManagerSerializationTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(type(PolicyTagManagerSerializationClient).get_transport_class, type(PolicyTagManagerSerializationClient))
+
+ def __init__(self, *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio",
+ client_options: Optional[ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the policy tag manager serialization client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
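As a small, hedged illustration of the endpoint and certificate resolution rules documented on ``get_mtls_endpoint_and_cert_source`` above (the printed result below assumes a default environment with neither ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` nor ``GOOGLE_API_USE_MTLS_ENDPOINT`` set):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import datacatalog_v1

    endpoint, cert_source = (
        datacatalog_v1.PolicyTagManagerSerializationClient
        .get_mtls_endpoint_and_cert_source(ClientOptions())
    )
    print(endpoint)      # "datacatalog.googleapis.com" in the default case
    print(cert_source)   # None unless a client certificate is configured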
+ """ + self._client = PolicyTagManagerSerializationClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def replace_taxonomy(self, + request: Optional[Union[policytagmanagerserialization.ReplaceTaxonomyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Replaces (updates) a taxonomy and all its policy tags. + + The taxonomy and its entire hierarchy of policy tags must be + represented literally by ``SerializedTaxonomy`` and the nested + ``SerializedPolicyTag`` messages. + + This operation automatically does the following: + + - Deletes the existing policy tags that are missing from the + ``SerializedPolicyTag``. + - Creates policy tags that don't have resource names. They are + considered new. + - Updates policy tags with valid resources names accordingly. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_replace_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() + serialized_taxonomy.display_name = "display_name_value" + + request = datacatalog_v1.ReplaceTaxonomyRequest( + name="name_value", + serialized_taxonomy=serialized_taxonomy, + ) + + # Make the request + response = await client.replace_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest, dict]]): + The request object. Request message for + [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ReplaceTaxonomyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.replace_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def import_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + in a given project by importing from inlined or + cross-regional sources. + + For a cross-regional source, new taxonomies are created + by copying from a source in another region. + + For an inlined source, taxonomies and policy tags are + created in bulk using nested protocol buffer structures. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest, dict]]): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def export_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies in the requested type and returns them, + including their policy tags. The requested taxonomies must + belong to the same project. + + This method generates ``SerializedTaxonomy`` protocol buffers + with nested policy tags that can be used as input for + ``ImportTaxonomies`` calls. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = await client.export_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest, dict]]): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. 
Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. 
Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "PolicyTagManagerSerializationAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerSerializationAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py new file mode 100644 index 000000000000..05e50ba07d8b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -0,0 +1,906 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerSerializationGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +class PolicyTagManagerSerializationClientMeta(type): + """Metaclass for the PolicyTagManagerSerialization client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
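The metaclass above maps a transport label ("grpc" or "grpc_asyncio") to one of the transport classes registered just below it; when no label is given, the sync client falls back to the first registered entry. A brief sketch of selecting a transport explicitly, under the assumption that default credentials are available:

    .. code-block:: python

        from google.cloud import datacatalog_v1

        # Resolve the transport class for a given label via the metaclass helper.
        transport_cls = datacatalog_v1.PolicyTagManagerSerializationClient.get_transport_class("grpc")

        # Or simply pass the label to the client constructor.
        client = datacatalog_v1.PolicyTagManagerSerializationClient(transport="grpc")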
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] + _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport + _transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[PolicyTagManagerSerializationTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PolicyTagManagerSerializationClient(metaclass=PolicyTagManagerSerializationClientMeta): + """Policy Tag Manager Serialization API service allows you to + manipulate your policy tags and taxonomies in a serialized + format. + + Taxonomy is a hierarchical group of policy tags. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datacatalog.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerSerializationClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerSerializationTransport: + """Returns the transport used by the client instance. 
+ + Returns: + PolicyTagManagerSerializationTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def taxonomy_path(project: str,location: str,taxonomy: str,) -> str: + """Returns a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str,str]: + """Parses a taxonomy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, PolicyTagManagerSerializationTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager serialization client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, PolicyTagManagerSerializationTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerSerializationTransport): + # transport is a PolicyTagManagerSerializationTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def replace_taxonomy(self, + request: Optional[Union[policytagmanagerserialization.ReplaceTaxonomyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Replaces (updates) a taxonomy and all its policy tags. + + The taxonomy and its entire hierarchy of policy tags must be + represented literally by ``SerializedTaxonomy`` and the nested + ``SerializedPolicyTag`` messages. + + This operation automatically does the following: + + - Deletes the existing policy tags that are missing from the + ``SerializedPolicyTag``. + - Creates policy tags that don't have resource names. They are + considered new. + - Updates policy tags with valid resources names accordingly. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_replace_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() + serialized_taxonomy.display_name = "display_name_value" + + request = datacatalog_v1.ReplaceTaxonomyRequest( + name="name_value", + serialized_taxonomy=serialized_taxonomy, + ) + + # Make the request + response = client.replace_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest, dict]): + The request object. Request message for + [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.Taxonomy: + A taxonomy is a collection of hierarchical policy tags that classify data + along a common axis. + + For example, a "data sensitivity" taxonomy might + contain the following policy tags: + + :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` + + A "data origin" taxonomy might contain the following + policy tags: + + :literal:`\` + User data + Employee data + Partner data + Public data`\ \` + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ReplaceTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanagerserialization.ReplaceTaxonomyRequest): + request = policytagmanagerserialization.ReplaceTaxonomyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.replace_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def import_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Creates new taxonomies (including their policy tags) + in a given project by importing from inlined or + cross-regional sources. + + For a cross-regional source, new taxonomies are created + by copying from a source in another region. 
+ + For an inlined source, taxonomies and policy tags are + created in bulk using nested protocol buffer structures. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = client.import_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest, dict]): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ImportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanagerserialization.ImportTaxonomiesRequest): + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def export_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports taxonomies in the requested type and returns them, + including their policy tags. The requested taxonomies must + belong to the same project. + + This method generates ``SerializedTaxonomy`` protocol buffers + with nested policy tags that can be used as input for + ``ImportTaxonomies`` calls. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + request = datacatalog_v1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = client.export_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest, dict]): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ExportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanagerserialization.ExportTaxonomiesRequest): + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PolicyTagManagerSerializationClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerSerializationClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py new file mode 100644 index 000000000000..faf2990e5837 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerSerializationTransport +from .grpc import PolicyTagManagerSerializationGrpcTransport +from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] +_transport_registry['grpc'] = PolicyTagManagerSerializationGrpcTransport +_transport_registry['grpc_asyncio'] = PolicyTagManagerSerializationGrpcAsyncIOTransport + +__all__ = ( + 'PolicyTagManagerSerializationTransport', + 'PolicyTagManagerSerializationGrpcTransport', + 'PolicyTagManagerSerializationGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py new file mode 100644 index 000000000000..4c384efe5e4d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
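A client can also be handed a pre-built transport instance instead of a label. A rough sketch, assuming Application Default Credentials are available and using the module layout of this package; when a transport instance is supplied, credentials and scopes must be configured on the transport itself (the client constructor rejects them otherwise):

    .. code-block:: python

        from google.cloud import datacatalog_v1
        from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports import (
            PolicyTagManagerSerializationGrpcTransport,
        )

        # Build the gRPC transport directly; credentials are resolved from the
        # environment because none are passed here.
        transport = PolicyTagManagerSerializationGrpcTransport(
            host="datacatalog.googleapis.com",
        )

        # Hand the transport to the client; do not also pass credentials.
        client = datacatalog_v1.PolicyTagManagerSerializationClient(transport=transport)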
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PolicyTagManagerSerializationTransport(abc.ABC): + """Abstract transport class for PolicyTagManagerSerialization.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
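+        # The GDCH audience below is applied only when the resolved credentials
+        # expose ``with_gdch_audience``; for ordinary service account credentials
+        # this branch is a no-op. Self-signed JWT access is enabled separately,
+        # just after, when ``always_use_jwt_access`` is set and the credentials
+        # are service account credentials.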
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.replace_taxonomy: gapic_v1.method.wrap_method( + self.replace_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.import_taxonomies: gapic_v1.method.wrap_method( + self.import_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.export_taxonomies: gapic_v1.method.wrap_method( + self.export_taxonomies, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def replace_taxonomy(self) -> Callable[ + [policytagmanagerserialization.ReplaceTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ImportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ExportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'PolicyTagManagerSerializationTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py new file mode 100644 index 000000000000..ca57a7325676 --- /dev/null +++ 
b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerSerializationGrpcTransport(PolicyTagManagerSerializationTransport): + """gRPC backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager Serialization API service allows you to + manipulate your policy tags and taxonomies in a serialized + format. + + Taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
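+ # An illustrative sketch of supplying this transport to the client explicitly rather than relying on the default; the host shown is simply the public endpoint:
+ #
+ #   from google.cloud import datacatalog_v1
+ #   from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports import PolicyTagManagerSerializationGrpcTransport
+ #
+ #   transport = PolicyTagManagerSerializationGrpcTransport(host="datacatalog.googleapis.com")
+ #   client = datacatalog_v1.PolicyTagManagerSerializationClient(transport=transport)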
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def replace_taxonomy(self) -> Callable[ + [policytagmanagerserialization.ReplaceTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the replace taxonomy method over gRPC. + + Replaces (updates) a taxonomy and all its policy tags.
+ + The taxonomy and its entire hierarchy of policy tags must be + represented literally by ``SerializedTaxonomy`` and the nested + ``SerializedPolicyTag`` messages. + + This operation automatically does the following: + + - Deletes the existing policy tags that are missing from the + ``SerializedPolicyTag``. + - Creates policy tags that don't have resource names. They are + considered new. + - Updates policy tags with valid resource names accordingly. + + Returns: + Callable[[~.ReplaceTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'replace_taxonomy' not in self._stubs: + self._stubs['replace_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ReplaceTaxonomy', + request_serializer=policytagmanagerserialization.ReplaceTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['replace_taxonomy'] + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + policytagmanagerserialization.ImportTaxonomiesResponse]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + in a given project by importing from inlined or + cross-regional sources. + + For a cross-regional source, new taxonomies are created + by copying from a source in another region. + + For an inlined source, taxonomies and policy tags are + created in bulk using nested protocol buffer structures. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + ~.ImportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_taxonomies' not in self._stubs: + self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies', + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs['import_taxonomies'] + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + policytagmanagerserialization.ExportTaxonomiesResponse]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies in the requested type and returns them, + including their policy tags. The requested taxonomies must + belong to the same project. + + This method generates ``SerializedTaxonomy`` protocol buffers + with nested policy tags that can be used as input for + ``ImportTaxonomies`` calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + ~.ExportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each.
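+ # A rough usage sketch for this RPC through the generated client; the project, location, and taxonomy IDs are placeholders:
+ #
+ #   request = datacatalog_v1.ExportTaxonomiesRequest(
+ #       parent="projects/my-project/locations/us",
+ #       taxonomies=["projects/my-project/locations/us/taxonomies/123"],
+ #       serialized_taxonomies=True,
+ #   )
+ #   response = client.export_taxonomies(request=request)
+ #   for taxonomy in response.taxonomies:
+ #       print(taxonomy.display_name)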
+ if 'export_taxonomies' not in self._stubs: + self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies', + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs['export_taxonomies'] + + def close(self): + self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
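+ # A rough sketch of the long-running-operations helpers wired up below, assuming the generated client surfaces the matching mixin methods; the resource name is a placeholder:
+ #
+ #   from google.longrunning import operations_pb2
+ #
+ #   op_request = operations_pb2.ListOperationsRequest(name="projects/my-project/locations/us")
+ #   for operation in client.list_operations(request=op_request).operations:
+ #       print(operation.name, operation.done)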
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'PolicyTagManagerSerializationGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py new file mode 100644 index 000000000000..310cb52405b7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -0,0 +1,421 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerSerializationGrpcTransport + + +class PolicyTagManagerSerializationGrpcAsyncIOTransport(PolicyTagManagerSerializationTransport): + """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. + + Policy Tag Manager Serialization API service allows you to + manipulate your policy tags and taxonomies in a serialized + format. + + Taxonomy is a hierarchical group of policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache.
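+ # An illustrative sketch of the async surface backed by this channel; the taxonomy name is a placeholder and serialized_taxonomy is assumed to be a datacatalog_v1.SerializedTaxonomy built by the caller:
+ #
+ #   from google.cloud import datacatalog_v1
+ #
+ #   async def replace(serialized_taxonomy):
+ #       client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient()
+ #       request = datacatalog_v1.ReplaceTaxonomyRequest(
+ #           name="projects/my-project/locations/us/taxonomies/123",
+ #           serialized_taxonomy=serialized_taxonomy,
+ #       )
+ #       return await client.replace_taxonomy(request=request)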
+ return self._grpc_channel + + @property + def replace_taxonomy(self) -> Callable[ + [policytagmanagerserialization.ReplaceTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the replace taxonomy method over gRPC. + + Replaces (updates) a taxonomy and all its policy tags. + + The taxonomy and its entire hierarchy of policy tags must be + represented literally by ``SerializedTaxonomy`` and the nested + ``SerializedPolicyTag`` messages. + + This operation automatically does the following: + + - Deletes the existing policy tags that are missing from the + ``SerializedPolicyTag``. + - Creates policy tags that don't have resource names. They are + considered new. + - Updates policy tags with valid resource names accordingly. + + Returns: + Callable[[~.ReplaceTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'replace_taxonomy' not in self._stubs: + self._stubs['replace_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ReplaceTaxonomy', + request_serializer=policytagmanagerserialization.ReplaceTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['replace_taxonomy'] + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse]]: + r"""Return a callable for the import taxonomies method over gRPC. + + Creates new taxonomies (including their policy tags) + in a given project by importing from inlined or + cross-regional sources. + + For a cross-regional source, new taxonomies are created + by copying from a source in another region. + + For an inlined source, taxonomies and policy tags are + created in bulk using nested protocol buffer structures. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + Awaitable[~.ImportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_taxonomies' not in self._stubs: + self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies', + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs['import_taxonomies'] + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse]]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports taxonomies in the requested type and returns them, + including their policy tags. The requested taxonomies must + belong to the same project. + + This method generates ``SerializedTaxonomy`` protocol buffers + with nested policy tags that can be used as input for + ``ImportTaxonomies`` calls.
+ + Returns: + Callable[[~.ExportTaxonomiesRequest], + Awaitable[~.ExportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'export_taxonomies' not in self._stubs: + self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies', + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs['export_taxonomies'] + + def close(self): + return self.grpc_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
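+ # A rough sketch of ImportTaxonomies with an inline source on the async client; the parent and display names are placeholders:
+ #
+ #   request = datacatalog_v1.ImportTaxonomiesRequest(
+ #       parent="projects/my-project/locations/us",
+ #       inline_source=datacatalog_v1.InlineSource(
+ #           taxonomies=[
+ #               datacatalog_v1.SerializedTaxonomy(
+ #                   display_name="Business criticality",
+ #                   policy_tags=[datacatalog_v1.SerializedPolicyTag(display_name="High")],
+ #               ),
+ #           ],
+ #       ),
+ #   )
+ #   response = await client.import_taxonomies(request=request)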
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'PolicyTagManagerSerializationGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py new file mode 100644 index 000000000000..c44baf372a05 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .bigquery import ( + BigQueryConnectionSpec, + BigQueryRoutineSpec, + CloudSqlBigQueryConnectionSpec, +) +from .common import ( + PersonalDetails, + IntegratedSystem, + ManagingSystem, +) +from .data_source import ( + DataSource, + StorageProperties, +) +from .datacatalog import ( + BusinessContext, + CloudBigtableInstanceSpec, + CloudBigtableSystemSpec, + Contacts, + CreateEntryGroupRequest, + CreateEntryRequest, + CreateTagRequest, + CreateTagTemplateFieldRequest, + CreateTagTemplateRequest, + DatabaseTableSpec, + DatasetSpec, + DataSourceConnectionSpec, + DeleteEntryGroupRequest, + DeleteEntryRequest, + DeleteTagRequest, + DeleteTagTemplateFieldRequest, + DeleteTagTemplateRequest, + Entry, + EntryGroup, + EntryOverview, + FilesetSpec, + GetEntryGroupRequest, + GetEntryRequest, + GetTagTemplateRequest, + ImportEntriesMetadata, + ImportEntriesRequest, + ImportEntriesResponse, + ListEntriesRequest, + ListEntriesResponse, + ListEntryGroupsRequest, + ListEntryGroupsResponse, + ListTagsRequest, + ListTagsResponse, + LookerSystemSpec, + LookupEntryRequest, + ModelSpec, + ModifyEntryContactsRequest, + ModifyEntryOverviewRequest, + ReconcileTagsMetadata, + ReconcileTagsRequest, + ReconcileTagsResponse, + RenameTagTemplateFieldEnumValueRequest, + RenameTagTemplateFieldRequest, + RoutineSpec, + SearchCatalogRequest, + SearchCatalogResponse, + ServiceSpec, + SqlDatabaseSystemSpec, + StarEntryRequest, + StarEntryResponse, + UnstarEntryRequest, + UnstarEntryResponse, + UpdateEntryGroupRequest, + UpdateEntryRequest, + UpdateTagRequest, + UpdateTagTemplateFieldRequest, + UpdateTagTemplateRequest, + VertexDatasetSpec, + VertexModelSourceInfo, + VertexModelSpec, + EntryType, +) +from .dataplex_spec import ( + DataplexExternalTable, + DataplexFilesetSpec, + DataplexSpec, + DataplexTableSpec, +) +from .dump_content import ( + DumpItem, + TaggedEntry, +) +from .gcs_fileset_spec import ( + GcsFilesetSpec, + GcsFileSpec, +) +from .physical_schema import ( + PhysicalSchema, +) +from .policytagmanager import ( + CreatePolicyTagRequest, + CreateTaxonomyRequest, + DeletePolicyTagRequest, + DeleteTaxonomyRequest, + 
GetPolicyTagRequest, + GetTaxonomyRequest, + ListPolicyTagsRequest, + ListPolicyTagsResponse, + ListTaxonomiesRequest, + ListTaxonomiesResponse, + PolicyTag, + Taxonomy, + UpdatePolicyTagRequest, + UpdateTaxonomyRequest, +) +from .policytagmanagerserialization import ( + CrossRegionalSource, + ExportTaxonomiesRequest, + ExportTaxonomiesResponse, + ImportTaxonomiesRequest, + ImportTaxonomiesResponse, + InlineSource, + ReplaceTaxonomyRequest, + SerializedPolicyTag, + SerializedTaxonomy, +) +from .schema import ( + ColumnSchema, + Schema, +) +from .search import ( + SearchCatalogResult, + SearchResultType, +) +from .table_spec import ( + BigQueryDateShardedSpec, + BigQueryTableSpec, + TableSpec, + ViewSpec, + TableSourceType, +) +from .tags import ( + FieldType, + Tag, + TagField, + TagTemplate, + TagTemplateField, +) +from .timestamps import ( + SystemTimestamps, +) +from .usage import ( + CommonUsageStats, + UsageSignal, + UsageStats, +) + +__all__ = ( + 'BigQueryConnectionSpec', + 'BigQueryRoutineSpec', + 'CloudSqlBigQueryConnectionSpec', + 'PersonalDetails', + 'IntegratedSystem', + 'ManagingSystem', + 'DataSource', + 'StorageProperties', + 'BusinessContext', + 'CloudBigtableInstanceSpec', + 'CloudBigtableSystemSpec', + 'Contacts', + 'CreateEntryGroupRequest', + 'CreateEntryRequest', + 'CreateTagRequest', + 'CreateTagTemplateFieldRequest', + 'CreateTagTemplateRequest', + 'DatabaseTableSpec', + 'DatasetSpec', + 'DataSourceConnectionSpec', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteTagRequest', + 'DeleteTagTemplateFieldRequest', + 'DeleteTagTemplateRequest', + 'Entry', + 'EntryGroup', + 'EntryOverview', + 'FilesetSpec', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetTagTemplateRequest', + 'ImportEntriesMetadata', + 'ImportEntriesRequest', + 'ImportEntriesResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListTagsRequest', + 'ListTagsResponse', + 'LookerSystemSpec', + 'LookupEntryRequest', + 'ModelSpec', + 'ModifyEntryContactsRequest', + 'ModifyEntryOverviewRequest', + 'ReconcileTagsMetadata', + 'ReconcileTagsRequest', + 'ReconcileTagsResponse', + 'RenameTagTemplateFieldEnumValueRequest', + 'RenameTagTemplateFieldRequest', + 'RoutineSpec', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'ServiceSpec', + 'SqlDatabaseSystemSpec', + 'StarEntryRequest', + 'StarEntryResponse', + 'UnstarEntryRequest', + 'UnstarEntryResponse', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateTagRequest', + 'UpdateTagTemplateFieldRequest', + 'UpdateTagTemplateRequest', + 'VertexDatasetSpec', + 'VertexModelSourceInfo', + 'VertexModelSpec', + 'EntryType', + 'DataplexExternalTable', + 'DataplexFilesetSpec', + 'DataplexSpec', + 'DataplexTableSpec', + 'DumpItem', + 'TaggedEntry', + 'GcsFilesetSpec', + 'GcsFileSpec', + 'PhysicalSchema', + 'CreatePolicyTagRequest', + 'CreateTaxonomyRequest', + 'DeletePolicyTagRequest', + 'DeleteTaxonomyRequest', + 'GetPolicyTagRequest', + 'GetTaxonomyRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'PolicyTag', + 'Taxonomy', + 'UpdatePolicyTagRequest', + 'UpdateTaxonomyRequest', + 'CrossRegionalSource', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + 'ImportTaxonomiesRequest', + 'ImportTaxonomiesResponse', + 'InlineSource', + 'ReplaceTaxonomyRequest', + 'SerializedPolicyTag', + 'SerializedTaxonomy', + 'ColumnSchema', + 'Schema', + 'SearchCatalogResult', + 'SearchResultType', + 
'BigQueryDateShardedSpec', + 'BigQueryTableSpec', + 'TableSpec', + 'ViewSpec', + 'TableSourceType', + 'FieldType', + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'SystemTimestamps', + 'CommonUsageStats', + 'UsageSignal', + 'UsageStats', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py new file mode 100644 index 000000000000..c6276afea7f5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'BigQueryConnectionSpec', + 'CloudSqlBigQueryConnectionSpec', + 'BigQueryRoutineSpec', + }, +) + + +class BigQueryConnectionSpec(proto.Message): + r"""Specification for the BigQuery connection. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + connection_type (google.cloud.datacatalog_v1.types.BigQueryConnectionSpec.ConnectionType): + The type of the BigQuery connection. + cloud_sql (google.cloud.datacatalog_v1.types.CloudSqlBigQueryConnectionSpec): + Specification for the BigQuery connection to + a Cloud SQL instance. + + This field is a member of `oneof`_ ``connection_spec``. + has_credential (bool): + True if there are credentials attached to the + BigQuery connection; false otherwise. + """ + class ConnectionType(proto.Enum): + r"""The type of the BigQuery connection. + + Values: + CONNECTION_TYPE_UNSPECIFIED (0): + Unspecified type. + CLOUD_SQL (1): + Cloud SQL connection. + """ + CONNECTION_TYPE_UNSPECIFIED = 0 + CLOUD_SQL = 1 + + connection_type: ConnectionType = proto.Field( + proto.ENUM, + number=1, + enum=ConnectionType, + ) + cloud_sql: 'CloudSqlBigQueryConnectionSpec' = proto.Field( + proto.MESSAGE, + number=2, + oneof='connection_spec', + message='CloudSqlBigQueryConnectionSpec', + ) + has_credential: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class CloudSqlBigQueryConnectionSpec(proto.Message): + r"""Specification for the BigQuery connection to a Cloud SQL + instance. + + Attributes: + instance_id (str): + Cloud SQL instance ID in the format of + ``project:location:instance``. + database (str): + Database name. + type_ (google.cloud.datacatalog_v1.types.CloudSqlBigQueryConnectionSpec.DatabaseType): + Type of the Cloud SQL database. + """ + class DatabaseType(proto.Enum): + r"""Supported Cloud SQL database types. + + Values: + DATABASE_TYPE_UNSPECIFIED (0): + Unspecified database type. + POSTGRES (1): + Cloud SQL for PostgreSQL. + MYSQL (2): + Cloud SQL for MySQL. 
+ """ + DATABASE_TYPE_UNSPECIFIED = 0 + POSTGRES = 1 + MYSQL = 2 + + instance_id: str = proto.Field( + proto.STRING, + number=1, + ) + database: str = proto.Field( + proto.STRING, + number=2, + ) + type_: DatabaseType = proto.Field( + proto.ENUM, + number=3, + enum=DatabaseType, + ) + + +class BigQueryRoutineSpec(proto.Message): + r"""Fields specific for BigQuery routines. + + Attributes: + imported_libraries (MutableSequence[str]): + Paths of the imported libraries. + """ + + imported_libraries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py new file mode 100644 index 000000000000..cd5c33035aa8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'IntegratedSystem', + 'ManagingSystem', + 'PersonalDetails', + }, +) + + +class IntegratedSystem(proto.Enum): + r"""This enum lists all the systems that Data Catalog integrates + with. + + Values: + INTEGRATED_SYSTEM_UNSPECIFIED (0): + Default unknown system. + BIGQUERY (1): + BigQuery. + CLOUD_PUBSUB (2): + Cloud Pub/Sub. + DATAPROC_METASTORE (3): + Dataproc Metastore. + DATAPLEX (4): + Dataplex. + CLOUD_SPANNER (6): + Cloud Spanner + CLOUD_BIGTABLE (7): + Cloud Bigtable + CLOUD_SQL (8): + Cloud Sql + LOOKER (9): + Looker + VERTEX_AI (10): + Vertex AI + """ + INTEGRATED_SYSTEM_UNSPECIFIED = 0 + BIGQUERY = 1 + CLOUD_PUBSUB = 2 + DATAPROC_METASTORE = 3 + DATAPLEX = 4 + CLOUD_SPANNER = 6 + CLOUD_BIGTABLE = 7 + CLOUD_SQL = 8 + LOOKER = 9 + VERTEX_AI = 10 + + +class ManagingSystem(proto.Enum): + r"""This enum describes all the systems that manage + Taxonomy and PolicyTag resources in DataCatalog. + + Values: + MANAGING_SYSTEM_UNSPECIFIED (0): + Default value + MANAGING_SYSTEM_DATAPLEX (1): + Dataplex. + MANAGING_SYSTEM_OTHER (2): + Other + """ + MANAGING_SYSTEM_UNSPECIFIED = 0 + MANAGING_SYSTEM_DATAPLEX = 1 + MANAGING_SYSTEM_OTHER = 2 + + +class PersonalDetails(proto.Message): + r"""Entry metadata relevant only to the user and private to them. + + Attributes: + starred (bool): + True if the entry is starred by the user; + false otherwise. + star_time (google.protobuf.timestamp_pb2.Timestamp): + Set if the entry is starred; unset otherwise. 
+ """ + + starred: bool = proto.Field( + proto.BOOL, + number=1, + ) + star_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py new file mode 100644 index 000000000000..2d497ec1370c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'DataSource', + 'StorageProperties', + }, +) + + +class DataSource(proto.Message): + r"""Physical location of an entry. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + service (google.cloud.datacatalog_v1.types.DataSource.Service): + Service that physically stores the data. + resource (str): + Full name of a resource as defined by the service. For + example: + + ``//bigquery.googleapis.com/projects/{PROJECT_ID}/locations/{LOCATION}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` + source_entry (str): + Output only. Data Catalog entry name, if + applicable. + storage_properties (google.cloud.datacatalog_v1.types.StorageProperties): + Detailed properties of the underlying + storage. + + This field is a member of `oneof`_ ``properties``. + """ + class Service(proto.Enum): + r"""Name of a service that stores the data. + + Values: + SERVICE_UNSPECIFIED (0): + Default unknown service. + CLOUD_STORAGE (1): + Google Cloud Storage service. + BIGQUERY (2): + BigQuery service. + """ + SERVICE_UNSPECIFIED = 0 + CLOUD_STORAGE = 1 + BIGQUERY = 2 + + service: Service = proto.Field( + proto.ENUM, + number=1, + enum=Service, + ) + resource: str = proto.Field( + proto.STRING, + number=2, + ) + source_entry: str = proto.Field( + proto.STRING, + number=3, + ) + storage_properties: 'StorageProperties' = proto.Field( + proto.MESSAGE, + number=4, + oneof='properties', + message='StorageProperties', + ) + + +class StorageProperties(proto.Message): + r"""Details the properties of the underlying storage. + + Attributes: + file_pattern (MutableSequence[str]): + Patterns to identify a set of files for this fileset. 
+ + Examples of a valid ``file_pattern``: + + - ``gs://bucket_name/dir/*``: matches all files in the + ``bucket_name/dir`` directory + - ``gs://bucket_name/dir/**``: matches all files in the + ``bucket_name/dir`` and all subdirectories recursively + - ``gs://bucket_name/file*``: matches files prefixed by + ``file`` in ``bucket_name`` + - ``gs://bucket_name/??.txt``: matches files with two + characters followed by ``.txt`` in ``bucket_name`` + - ``gs://bucket_name/[aeiou].txt``: matches files that + contain a single vowel character followed by ``.txt`` in + ``bucket_name`` + - ``gs://bucket_name/[a-m].txt``: matches files that + contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` + in ``bucket_name`` + - ``gs://bucket_name/a/*/b``: matches all files in + ``bucket_name`` that match the ``a/*/b`` pattern, such as + ``a/c/b``, ``a/d/b`` + - ``gs://another_bucket/a.txt``: matches + ``gs://another_bucket/a.txt`` + file_type (str): + File type in MIME format, for example, ``text/plain``. + """ + + file_pattern: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + file_type: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py new file mode 100644 index 000000000000..831fc14fc2fd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py @@ -0,0 +1,2727 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import bigquery +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source as gcd_data_source +from google.cloud.datacatalog_v1.types import dataplex_spec +from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec +from google.cloud.datacatalog_v1.types import schema as gcd_schema +from google.cloud.datacatalog_v1.types import search +from google.cloud.datacatalog_v1.types import table_spec +from google.cloud.datacatalog_v1.types import tags as gcd_tags +from google.cloud.datacatalog_v1.types import timestamps +from google.cloud.datacatalog_v1.types import usage +from google.protobuf import field_mask_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'EntryType', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'CreateEntryGroupRequest', + 'UpdateEntryGroupRequest', + 'GetEntryGroupRequest', + 'DeleteEntryGroupRequest', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'CreateEntryRequest', + 'UpdateEntryRequest', + 'DeleteEntryRequest', + 'GetEntryRequest', + 'LookupEntryRequest', + 'Entry', + 'DatabaseTableSpec', + 'FilesetSpec', + 'DataSourceConnectionSpec', + 'RoutineSpec', + 'DatasetSpec', + 'SqlDatabaseSystemSpec', + 'LookerSystemSpec', + 'CloudBigtableSystemSpec', + 'CloudBigtableInstanceSpec', + 'ServiceSpec', + 'VertexModelSourceInfo', + 'VertexModelSpec', + 'VertexDatasetSpec', + 'ModelSpec', + 'BusinessContext', + 'EntryOverview', + 'Contacts', + 'EntryGroup', + 'CreateTagTemplateRequest', + 'GetTagTemplateRequest', + 'UpdateTagTemplateRequest', + 'DeleteTagTemplateRequest', + 'CreateTagRequest', + 'UpdateTagRequest', + 'DeleteTagRequest', + 'CreateTagTemplateFieldRequest', + 'UpdateTagTemplateFieldRequest', + 'RenameTagTemplateFieldRequest', + 'RenameTagTemplateFieldEnumValueRequest', + 'DeleteTagTemplateFieldRequest', + 'ListTagsRequest', + 'ListTagsResponse', + 'ReconcileTagsRequest', + 'ReconcileTagsResponse', + 'ReconcileTagsMetadata', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'StarEntryRequest', + 'StarEntryResponse', + 'UnstarEntryRequest', + 'UnstarEntryResponse', + 'ImportEntriesRequest', + 'ImportEntriesResponse', + 'ImportEntriesMetadata', + 'ModifyEntryOverviewRequest', + 'ModifyEntryContactsRequest', + }, +) + + +class EntryType(proto.Enum): + r"""Metadata automatically ingested from Google Cloud resources like + BigQuery tables or Pub/Sub topics always uses enum values from + ``EntryType`` as the type of entry. + + Other sources of metadata like Hive or Oracle databases can identify + the type by either using one of the enum values from ``EntryType`` + (for example, ``FILESET`` for a Cloud Storage fileset) or specifying + a custom value using the ```Entry`` <#resource:-entry>`__ field + ``user_specified_type``. For more information, see `Surface files + from Cloud Storage with fileset + entries `__ or `Create custom + entries for your data + sources `__. + + Values: + ENTRY_TYPE_UNSPECIFIED (0): + Default unknown type. + TABLE (2): + The entry type that has a GoogleSQL schema, + including logical views. + MODEL (5): + The type of models. + + For more information, see `Supported models in BigQuery + ML `__. + DATA_STREAM (3): + An entry type for streaming entries. 
For + example, a Pub/Sub topic. + FILESET (4): + An entry type for a set of files or objects. + For example, a Cloud Storage fileset. + CLUSTER (6): + A group of servers that work together. For + example, a Kafka cluster. + DATABASE (7): + A database. + DATA_SOURCE_CONNECTION (8): + Connection to a data source. For example, a + BigQuery connection. + ROUTINE (9): + Routine, for example, a BigQuery routine. + LAKE (10): + A Dataplex lake. + ZONE (11): + A Dataplex zone. + SERVICE (14): + A service, for example, a Dataproc Metastore + service. + DATABASE_SCHEMA (15): + Schema within a relational database. + DASHBOARD (16): + A Dashboard, for example from Looker. + EXPLORE (17): + A Looker Explore. + + For more information, see [Looker Explore API] + (https://developers.looker.com/api/explorer/4.0/methods/LookmlModel/lookml_model_explore). + LOOK (18): + A Looker Look. + + For more information, see [Looker Look API] + (https://developers.looker.com/api/explorer/4.0/methods/Look). + """ + ENTRY_TYPE_UNSPECIFIED = 0 + TABLE = 2 + MODEL = 5 + DATA_STREAM = 3 + FILESET = 4 + CLUSTER = 6 + DATABASE = 7 + DATA_SOURCE_CONNECTION = 8 + ROUTINE = 9 + LAKE = 10 + ZONE = 11 + SERVICE = 14 + DATABASE_SCHEMA = 15 + DASHBOARD = 16 + EXPLORE = 17 + LOOK = 18 + + +class SearchCatalogRequest(proto.Message): + r"""Request message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Attributes: + scope (google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope): + Required. The scope of this search request. + + The ``scope`` is invalid if ``include_org_ids``, + ``include_project_ids`` are empty AND + ``include_gcp_public_datasets`` is set to ``false``. In this + case, the request returns an error. + query (str): + Optional. The query string with a minimum of 3 characters + and specific syntax. For more information, see `Data Catalog + search + syntax `__. + + An empty query string returns all data assets (in the + specified scope) that you have access to. + + A query string can be a simple ``xyz`` or qualified by + predicates: + + - ``name:x`` + - ``column:y`` + - ``description:z`` + page_size (int): + Upper bound on the number of results you can + get in a single response. + Can't be negative or 0, defaults to 10 in this + case. The maximum number is 1000. If exceeded, + throws an "invalid argument" exception. + page_token (str): + Optional. Pagination token that, if specified, returns the + next page of search results. If empty, returns the first + page. + + This token is returned in the + [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1.SearchCatalogResponse.next_page_token] + field of the response to a previous + [SearchCatalogRequest][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog] + call. + order_by (str): + Specifies the order of results. + + Currently supported case-sensitive values are: + + - ``relevance`` that can only be descending + - ``last_modified_timestamp [asc|desc]`` with descending + (``desc``) as default + - ``default`` that can only be descending + + Search queries don't guarantee full recall. Results that + match your query might not be returned, even in subsequent + result pages. Additionally, returned (and not returned) + results can vary if you repeat search queries. If you are + experiencing recall issues and you don't have to fetch the + results in any specific order, consider setting this + parameter to ``default``. + + If this parameter is omitted, it defaults to the descending + ``relevance``. 
+ admin_search (bool): + Optional. If set, use searchAll permission granted on + organizations from ``include_org_ids`` and projects from + ``include_project_ids`` instead of the fine grained per + resource permissions when filtering the search results. The + only allowed ``order_by`` criteria for admin_search mode is + ``default``. Using this flags guarantees a full recall of + the search results. + """ + + class Scope(proto.Message): + r"""The criteria that select the subspace used for query + matching. + + Attributes: + include_org_ids (MutableSequence[str]): + The list of organization IDs to search within. + + To find your organization ID, follow the steps from + [Creating and managing organizations] + (/resource-manager/docs/creating-managing-organization). + include_project_ids (MutableSequence[str]): + The list of project IDs to search within. + + For more information on the distinction between project + names, IDs, and numbers, see + `Projects `__. + include_gcp_public_datasets (bool): + If ``true``, include Google Cloud public datasets in search + results. By default, they are excluded. + + See `Google Cloud Public Datasets `__ for + more information. + restricted_locations (MutableSequence[str]): + Optional. The list of locations to search within. If empty, + all locations are searched. + + Returns an error if any location in the list isn't one of + the `Supported + regions `__. + + If a location is unreachable, its name is returned in the + ``SearchCatalogResponse.unreachable`` field. To get + additional information on the error, repeat the search + request and set the location name as the value of this + parameter. + starred_only (bool): + Optional. If ``true``, search only among starred entries. + + By default, all results are returned, starred or not. + include_public_tag_templates (bool): + Optional. This field is deprecated. The + search mechanism for public and private tag + templates is the same. + """ + + include_org_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + include_project_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + include_gcp_public_datasets: bool = proto.Field( + proto.BOOL, + number=7, + ) + restricted_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + starred_only: bool = proto.Field( + proto.BOOL, + number=18, + ) + include_public_tag_templates: bool = proto.Field( + proto.BOOL, + number=19, + ) + + scope: Scope = proto.Field( + proto.MESSAGE, + number=6, + message=Scope, + ) + query: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + admin_search: bool = proto.Field( + proto.BOOL, + number=17, + ) + + +class SearchCatalogResponse(proto.Message): + r"""Response message for + [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. + + Attributes: + results (MutableSequence[google.cloud.datacatalog_v1.types.SearchCatalogResult]): + Search results. + total_size (int): + The approximate total number of entries + matched by the query. + next_page_token (str): + Pagination token that can be used in + subsequent calls to retrieve the next page of + results. + unreachable (MutableSequence[str]): + Unreachable locations. Search results don't include data + from those locations. 
+ + To get additional information on an error, repeat the search + request and restrict it to specific locations by setting the + ``SearchCatalogRequest.scope.restricted_locations`` + parameter. + """ + + @property + def raw_page(self): + return self + + results: MutableSequence[search.SearchCatalogResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=search.SearchCatalogResult, + ) + total_size: int = proto.Field( + proto.INT32, + number=2, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class CreateEntryGroupRequest(proto.Message): + r"""Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. + + Attributes: + parent (str): + Required. The names of the project and + location that the new entry group belongs to. + + Note: The entry group itself and its child + resources might not be stored in the location + specified in its name. + entry_group_id (str): + Required. The ID of the entry group to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and must start with a letter or underscore. + The maximum size is 64 bytes when encoded in UTF-8. + entry_group (google.cloud.datacatalog_v1.types.EntryGroup): + The entry group to create. Defaults to empty. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_group_id: str = proto.Field( + proto.STRING, + number=3, + ) + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=2, + message='EntryGroup', + ) + + +class UpdateEntryGroupRequest(proto.Message): + r"""Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. + + Attributes: + entry_group (google.cloud.datacatalog_v1.types.EntryGroup): + Required. Updates for the entry group. The ``name`` field + must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on + an entry group. + If this parameter is absent or empty, all + modifiable fields are overwritten. If such + fields are non-required and omitted in the + request body, their values are emptied. + """ + + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetEntryGroupRequest(proto.Message): + r"""Request message for + [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. + + Attributes: + name (str): + Required. The name of the entry group to get. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return. If empty or omitted, + all fields are returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEntryGroupRequest(proto.Message): + r"""Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. + + Attributes: + name (str): + Required. The name of the entry group to + delete. + force (bool): + Optional. If true, deletes all entries in the + entry group. 
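+
+    Example:
+        A minimal usage sketch; the project, location, and entry group
+        IDs below are placeholders, not values defined by this API::
+
+            from google.cloud import datacatalog_v1
+
+            client = datacatalog_v1.DataCatalogClient()
+            client.delete_entry_group(
+                request=datacatalog_v1.DeleteEntryGroupRequest(
+                    # Placeholder resource name; substitute a real entry group.
+                    name="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
+                    force=True,
+                )
+            )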
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListEntryGroupsRequest(proto.Message): + r"""Request message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + + Attributes: + parent (str): + Required. The name of the location that + contains the entry groups to list. + Can be provided as a URL. + page_size (int): + Optional. The maximum number of items to return. + + Default is 10. Maximum limit is 1000. Throws an invalid + argument if ``page_size`` is greater than 1000. + page_token (str): + Optional. Pagination token that specifies the + next page to return. If empty, returns the first + page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEntryGroupsResponse(proto.Message): + r"""Response message for + [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. + + Attributes: + entry_groups (MutableSequence[google.cloud.datacatalog_v1.types.EntryGroup]): + Entry group details. + next_page_token (str): + Pagination token to specify in the next call + to retrieve the next page of results. Empty if + there are no more items. + """ + + @property + def raw_page(self): + return self + + entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateEntryRequest(proto.Message): + r"""Request message for + [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. + + Attributes: + parent (str): + Required. The name of the entry group this + entry belongs to. + Note: The entry itself and its child resources + might not be stored in the location specified in + its name. + entry_id (str): + Required. The ID of the entry to create. + + The ID must contain only letters (a-z, A-Z), numbers (0-9), + and underscores (_). The maximum size is 64 bytes when + encoded in UTF-8. + entry (google.cloud.datacatalog_v1.types.Entry): + Required. The entry to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_id: str = proto.Field( + proto.STRING, + number=3, + ) + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=2, + message='Entry', + ) + + +class UpdateEntryRequest(proto.Message): + r"""Request message for + [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. + + Attributes: + entry (google.cloud.datacatalog_v1.types.Entry): + Required. Updates for the entry. The ``name`` field must be + set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied. + + You can modify only the fields listed below. 
+ + For entries with type ``DATA_STREAM``: + + - ``schema`` + + For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEntryRequest(proto.Message): + r"""Request message for + [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. + + Attributes: + name (str): + Required. The name of the entry to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEntryRequest(proto.Message): + r"""Request message for + [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. + + Attributes: + name (str): + Required. The name of the entry to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupEntryRequest(proto.Message): + r"""Request message for + [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + linked_resource (str): + The full name of the Google Cloud Platform resource the Data + Catalog entry represents. For more information, see [Full + Resource Name] + (https://cloud.google.com/apis/design/resource_names#full_resource_name). + + Full names are case-sensitive. For example: + + - ``//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` + - ``//pubsub.googleapis.com/projects/{PROJECT_ID}/topics/{TOPIC_ID}`` + + This field is a member of `oneof`_ ``target_name``. + sql_resource (str): + The SQL name of the entry. SQL names are case-sensitive. + + Examples: + + - ``pubsub.topic.{PROJECT_ID}.{TOPIC_ID}`` + - ``pubsub.topic.{PROJECT_ID}.``\ \`\ ``{TOPIC.ID.SEPARATED.WITH.DOTS}``\ \` + - ``bigquery.table.{PROJECT_ID}.{DATASET_ID}.{TABLE_ID}`` + - ``bigquery.dataset.{PROJECT_ID}.{DATASET_ID}`` + - ``datacatalog.entry.{PROJECT_ID}.{LOCATION_ID}.{ENTRY_GROUP_ID}.{ENTRY_ID}`` + + Identifiers (``*_ID``) should comply with the [Lexical + structure in Standard SQL] + (https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical). + + This field is a member of `oneof`_ ``target_name``. + fully_qualified_name (str): + `Fully Qualified Name + (FQN) `__ + of the resource. + + FQNs take two forms: + + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:{PROJECT_ID}.{LOCATION_ID}.{INSTANCE_ID}.{DATABASE_ID}.{TABLE_ID}`` + + This field is a member of `oneof`_ ``target_name``. + project (str): + Project where the lookup should be performed. Required to + lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` + ``integrated_system`` using its ``fully_qualified_name``. 
+ Ignored in other cases. + location (str): + Location where the lookup should be performed. Required to + lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` + ``integrated_system`` using its ``fully_qualified_name``. + Ignored in other cases. + """ + + linked_resource: str = proto.Field( + proto.STRING, + number=1, + oneof='target_name', + ) + sql_resource: str = proto.Field( + proto.STRING, + number=3, + oneof='target_name', + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=5, + oneof='target_name', + ) + project: str = proto.Field( + proto.STRING, + number=6, + ) + location: str = proto.Field( + proto.STRING, + number=7, + ) + + +class Entry(proto.Message): + r"""Entry metadata. A Data Catalog entry represents another resource in + Google Cloud Platform (such as a BigQuery dataset or a Pub/Sub + topic) or outside of it. You can use the ``linked_resource`` field + in the entry resource to refer to the original resource ID of the + source system. + + An entry resource contains resource details, for example, its + schema. Additionally, you can attach flexible metadata to an entry + in the form of a [Tag][google.cloud.datacatalog.v1.Tag]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The resource name of an entry in + URL format. + Note: The entry itself and its child resources + might not be stored in the location specified in + its name. + linked_resource (str): + The resource this metadata entry refers to. + + For Google Cloud Platform resources, ``linked_resource`` is + the [Full Resource Name] + (https://cloud.google.com/apis/design/resource_names#full_resource_name). + For example, the ``linked_resource`` for a table resource + from BigQuery is: + + ``//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` + + Output only when the entry is one of the types in the + ``EntryType`` enum. + + For entries with a ``user_specified_type``, this field is + optional and defaults to an empty string. + + The resource string must contain only letters (a-z, A-Z), + numbers (0-9), underscores (_), periods (.), colons (:), + slashes (/), dashes (-), and hashes (#). The maximum size is + 200 bytes when encoded in UTF-8. + fully_qualified_name (str): + `Fully Qualified Name + (FQN) `__ + of the resource. Set automatically for entries representing + resources from synced systems. Settable only during + creation, and read-only later. Can be used for search and + lookup of the entries. + type_ (google.cloud.datacatalog_v1.types.EntryType): + The type of the entry. + + For details, see ```EntryType`` <#entrytype>`__. + + This field is a member of `oneof`_ ``entry_type``. + user_specified_type (str): + Custom entry type that doesn't match any of the values + allowed for input and listed in the ``EntryType`` enum. + + When creating an entry, first check the type values in the + enum. If there are no appropriate types for the new entry, + provide a custom value, for example, ``my_special_type``. + + The ``user_specified_type`` string has the following + limitations: + + - Is case insensitive. + - Must begin with a letter or underscore. + - Can only contain letters, numbers, and underscores. 
+            - Must be at least 1 character and at most 64 characters
+              long.
+
+            This field is a member of `oneof`_ ``entry_type``.
+        integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem):
+            Output only. Indicates the entry's source
+            system that Data Catalog integrates with, such
+            as BigQuery, Pub/Sub, or Dataproc Metastore.
+
+            This field is a member of `oneof`_ ``system``.
+        user_specified_system (str):
+            Indicates the entry's source system that Data Catalog
+            doesn't automatically integrate with.
+
+            The ``user_specified_system`` string has the following
+            limitations:
+
+            - Is case insensitive.
+            - Must begin with a letter or underscore.
+            - Can only contain letters, numbers, and underscores.
+            - Must be at least 1 character and at most 64 characters
+              long.
+
+            This field is a member of `oneof`_ ``system``.
+        sql_database_system_spec (google.cloud.datacatalog_v1.types.SqlDatabaseSystemSpec):
+            Specification that applies to a relational database system.
+            Only settable when ``user_specified_system`` is equal to
+            ``SQL_DATABASE``.
+
+            This field is a member of `oneof`_ ``system_spec``.
+        looker_system_spec (google.cloud.datacatalog_v1.types.LookerSystemSpec):
+            Specification that applies to the Looker system. Only
+            settable when ``user_specified_system`` is equal to
+            ``LOOKER``.
+
+            This field is a member of `oneof`_ ``system_spec``.
+        cloud_bigtable_system_spec (google.cloud.datacatalog_v1.types.CloudBigtableSystemSpec):
+            Specification that applies to the Cloud Bigtable system.
+            Only settable when ``integrated_system`` is equal to
+            ``CLOUD_BIGTABLE``.
+
+            This field is a member of `oneof`_ ``system_spec``.
+        gcs_fileset_spec (google.cloud.datacatalog_v1.types.GcsFilesetSpec):
+            Specification that applies to a Cloud Storage fileset. Valid
+            only for entries with the ``FILESET`` type.
+
+            This field is a member of `oneof`_ ``type_spec``.
+        bigquery_table_spec (google.cloud.datacatalog_v1.types.BigQueryTableSpec):
+            Output only. Specification that applies to a BigQuery table.
+            Valid only for entries with the ``TABLE`` type.
+
+            This field is a member of `oneof`_ ``type_spec``.
+        bigquery_date_sharded_spec (google.cloud.datacatalog_v1.types.BigQueryDateShardedSpec):
+            Output only. Specification for a group of BigQuery tables
+            with the ``[prefix]YYYYMMDD`` name pattern.
+
+            For more information, see [Introduction to partitioned
+            tables]
+            (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding).
+
+            This field is a member of `oneof`_ ``type_spec``.
+        database_table_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec):
+            Specification that applies to a table resource. Valid only
+            for entries with the ``TABLE`` or ``EXPLORE`` type.
+
+            This field is a member of `oneof`_ ``spec``.
+        data_source_connection_spec (google.cloud.datacatalog_v1.types.DataSourceConnectionSpec):
+            Specification that applies to a data source connection.
+            Valid only for entries with the ``DATA_SOURCE_CONNECTION``
+            type.
+
+            This field is a member of `oneof`_ ``spec``.
+        routine_spec (google.cloud.datacatalog_v1.types.RoutineSpec):
+            Specification that applies to a user-defined function or
+            procedure. Valid only for entries with the ``ROUTINE`` type.
+
+            This field is a member of `oneof`_ ``spec``.
+        dataset_spec (google.cloud.datacatalog_v1.types.DatasetSpec):
+            Specification that applies to a dataset.
+
+            This field is a member of `oneof`_ ``spec``.
+        fileset_spec (google.cloud.datacatalog_v1.types.FilesetSpec):
+            Specification that applies to a fileset resource.
Valid only + for entries with the ``FILESET`` type. + + This field is a member of `oneof`_ ``spec``. + service_spec (google.cloud.datacatalog_v1.types.ServiceSpec): + Specification that applies to a Service + resource. + + This field is a member of `oneof`_ ``spec``. + model_spec (google.cloud.datacatalog_v1.types.ModelSpec): + Model specification. + + This field is a member of `oneof`_ ``spec``. + display_name (str): + Display name of an entry. + + The maximum size is 500 bytes when encoded in + UTF-8. Default value is an empty string. + description (str): + Entry description that can consist of several + sentences or paragraphs that describe entry + contents. + + The description must not contain Unicode + non-characters as well as C0 and C1 control + codes except tabs (HT), new lines (LF), carriage + returns (CR), and page breaks (FF). + The maximum size is 2000 bytes when encoded in + UTF-8. Default value is an empty string. + business_context (google.cloud.datacatalog_v1.types.BusinessContext): + Business Context of the entry. Not supported + for BigQuery datasets + schema (google.cloud.datacatalog_v1.types.Schema): + Schema of the entry. An entry might not have + any schema attached to it. + source_system_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Timestamps from the underlying resource, not from the Data + Catalog entry. + + Output only when the entry has a system listed in the + ``IntegratedSystem`` enum. For entries with + ``user_specified_system``, this field is optional and + defaults to an empty timestamp. + usage_signal (google.cloud.datacatalog_v1.types.UsageSignal): + Resource usage statistics. + labels (MutableMapping[str, str]): + Cloud labels attached to the entry. + + In Data Catalog, you can create and modify + labels attached only to custom entries. Synced + entries have unmodifiable labels that come from + the source system. + data_source (google.cloud.datacatalog_v1.types.DataSource): + Output only. Physical location of the entry. + personal_details (google.cloud.datacatalog_v1.types.PersonalDetails): + Output only. Additional information related + to the entry. Private to the current user. 
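+
+    Example:
+        A minimal sketch of a custom entry built from this message; the
+        parent entry group, the IDs, and the ``my_special_type`` value
+        are placeholders::
+
+            from google.cloud import datacatalog_v1
+
+            client = datacatalog_v1.DataCatalogClient()
+            entry = client.create_entry(
+                # Placeholder parent entry group; substitute a real one.
+                parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
+                entry_id="my_entry",
+                entry=datacatalog_v1.Entry(
+                    user_specified_type="my_special_type",
+                    user_specified_system="my_on_prem_system",
+                    display_name="My custom entry",
+                ),
+            )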
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=9, + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=29, + ) + type_: 'EntryType' = proto.Field( + proto.ENUM, + number=2, + oneof='entry_type', + enum='EntryType', + ) + user_specified_type: str = proto.Field( + proto.STRING, + number=16, + oneof='entry_type', + ) + integrated_system: common.IntegratedSystem = proto.Field( + proto.ENUM, + number=17, + oneof='system', + enum=common.IntegratedSystem, + ) + user_specified_system: str = proto.Field( + proto.STRING, + number=18, + oneof='system', + ) + sql_database_system_spec: 'SqlDatabaseSystemSpec' = proto.Field( + proto.MESSAGE, + number=39, + oneof='system_spec', + message='SqlDatabaseSystemSpec', + ) + looker_system_spec: 'LookerSystemSpec' = proto.Field( + proto.MESSAGE, + number=40, + oneof='system_spec', + message='LookerSystemSpec', + ) + cloud_bigtable_system_spec: 'CloudBigtableSystemSpec' = proto.Field( + proto.MESSAGE, + number=41, + oneof='system_spec', + message='CloudBigtableSystemSpec', + ) + gcs_fileset_spec: gcd_gcs_fileset_spec.GcsFilesetSpec = proto.Field( + proto.MESSAGE, + number=6, + oneof='type_spec', + message=gcd_gcs_fileset_spec.GcsFilesetSpec, + ) + bigquery_table_spec: table_spec.BigQueryTableSpec = proto.Field( + proto.MESSAGE, + number=12, + oneof='type_spec', + message=table_spec.BigQueryTableSpec, + ) + bigquery_date_sharded_spec: table_spec.BigQueryDateShardedSpec = proto.Field( + proto.MESSAGE, + number=15, + oneof='type_spec', + message=table_spec.BigQueryDateShardedSpec, + ) + database_table_spec: 'DatabaseTableSpec' = proto.Field( + proto.MESSAGE, + number=24, + oneof='spec', + message='DatabaseTableSpec', + ) + data_source_connection_spec: 'DataSourceConnectionSpec' = proto.Field( + proto.MESSAGE, + number=27, + oneof='spec', + message='DataSourceConnectionSpec', + ) + routine_spec: 'RoutineSpec' = proto.Field( + proto.MESSAGE, + number=28, + oneof='spec', + message='RoutineSpec', + ) + dataset_spec: 'DatasetSpec' = proto.Field( + proto.MESSAGE, + number=32, + oneof='spec', + message='DatasetSpec', + ) + fileset_spec: 'FilesetSpec' = proto.Field( + proto.MESSAGE, + number=33, + oneof='spec', + message='FilesetSpec', + ) + service_spec: 'ServiceSpec' = proto.Field( + proto.MESSAGE, + number=42, + oneof='spec', + message='ServiceSpec', + ) + model_spec: 'ModelSpec' = proto.Field( + proto.MESSAGE, + number=43, + oneof='spec', + message='ModelSpec', + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + business_context: 'BusinessContext' = proto.Field( + proto.MESSAGE, + number=37, + message='BusinessContext', + ) + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=5, + message=gcd_schema.Schema, + ) + source_system_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=7, + message=timestamps.SystemTimestamps, + ) + usage_signal: usage.UsageSignal = proto.Field( + proto.MESSAGE, + number=13, + message=usage.UsageSignal, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + data_source: gcd_data_source.DataSource = proto.Field( + proto.MESSAGE, + number=20, + message=gcd_data_source.DataSource, + ) + personal_details: common.PersonalDetails = proto.Field( + proto.MESSAGE, + number=26, + message=common.PersonalDetails, + ) + + +class DatabaseTableSpec(proto.Message): + 
r"""Specification that applies to a table resource. Valid only for + entries with the ``TABLE`` type. + + Attributes: + type_ (google.cloud.datacatalog_v1.types.DatabaseTableSpec.TableType): + Type of this table. + dataplex_table (google.cloud.datacatalog_v1.types.DataplexTableSpec): + Output only. Fields specific to a Dataplex + table and present only in the Dataplex table + entries. + database_view_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec.DatabaseViewSpec): + Spec what aplies to tables that are actually + views. Not set for "real" tables. + """ + class TableType(proto.Enum): + r"""Type of the table. + + Values: + TABLE_TYPE_UNSPECIFIED (0): + Default unknown table type. + NATIVE (1): + Native table. + EXTERNAL (2): + External table. + """ + TABLE_TYPE_UNSPECIFIED = 0 + NATIVE = 1 + EXTERNAL = 2 + + class DatabaseViewSpec(proto.Message): + r"""Specification that applies to database view. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + view_type (google.cloud.datacatalog_v1.types.DatabaseTableSpec.DatabaseViewSpec.ViewType): + Type of this view. + base_table (str): + Name of a singular table this view reflects + one to one. + + This field is a member of `oneof`_ ``source_definition``. + sql_query (str): + SQL query used to generate this view. + + This field is a member of `oneof`_ ``source_definition``. + """ + class ViewType(proto.Enum): + r"""Concrete type of the view. + + Values: + VIEW_TYPE_UNSPECIFIED (0): + Default unknown view type. + STANDARD_VIEW (1): + Standard view. + MATERIALIZED_VIEW (2): + Materialized view. + """ + VIEW_TYPE_UNSPECIFIED = 0 + STANDARD_VIEW = 1 + MATERIALIZED_VIEW = 2 + + view_type: 'DatabaseTableSpec.DatabaseViewSpec.ViewType' = proto.Field( + proto.ENUM, + number=1, + enum='DatabaseTableSpec.DatabaseViewSpec.ViewType', + ) + base_table: str = proto.Field( + proto.STRING, + number=2, + oneof='source_definition', + ) + sql_query: str = proto.Field( + proto.STRING, + number=3, + oneof='source_definition', + ) + + type_: TableType = proto.Field( + proto.ENUM, + number=1, + enum=TableType, + ) + dataplex_table: dataplex_spec.DataplexTableSpec = proto.Field( + proto.MESSAGE, + number=2, + message=dataplex_spec.DataplexTableSpec, + ) + database_view_spec: DatabaseViewSpec = proto.Field( + proto.MESSAGE, + number=3, + message=DatabaseViewSpec, + ) + + +class FilesetSpec(proto.Message): + r"""Specification that applies to a fileset. Valid only for + entries with the 'FILESET' type. + + Attributes: + dataplex_fileset (google.cloud.datacatalog_v1.types.DataplexFilesetSpec): + Fields specific to a Dataplex fileset and + present only in the Dataplex fileset entries. + """ + + dataplex_fileset: dataplex_spec.DataplexFilesetSpec = proto.Field( + proto.MESSAGE, + number=1, + message=dataplex_spec.DataplexFilesetSpec, + ) + + +class DataSourceConnectionSpec(proto.Message): + r"""Specification that applies to a data source connection. Valid only + for entries with the ``DATA_SOURCE_CONNECTION`` type. Only one of + internal specs can be set at the time, and cannot be changed later. + + Attributes: + bigquery_connection_spec (google.cloud.datacatalog_v1.types.BigQueryConnectionSpec): + Output only. Fields specific to BigQuery + connections. 
+ """ + + bigquery_connection_spec: bigquery.BigQueryConnectionSpec = proto.Field( + proto.MESSAGE, + number=1, + message=bigquery.BigQueryConnectionSpec, + ) + + +class RoutineSpec(proto.Message): + r"""Specification that applies to a routine. Valid only for entries with + the ``ROUTINE`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + routine_type (google.cloud.datacatalog_v1.types.RoutineSpec.RoutineType): + The type of the routine. + language (str): + The language the routine is written in. The exact value + depends on the source system. For BigQuery routines, + possible values are: + + - ``SQL`` + - ``JAVASCRIPT`` + routine_arguments (MutableSequence[google.cloud.datacatalog_v1.types.RoutineSpec.Argument]): + Arguments of the routine. + return_type (str): + Return type of the argument. The exact value + depends on the source system and the language. + definition_body (str): + The body of the routine. + bigquery_routine_spec (google.cloud.datacatalog_v1.types.BigQueryRoutineSpec): + Fields specific for BigQuery routines. + + This field is a member of `oneof`_ ``system_spec``. + """ + class RoutineType(proto.Enum): + r"""The fine-grained type of the routine. + + Values: + ROUTINE_TYPE_UNSPECIFIED (0): + Unspecified type. + SCALAR_FUNCTION (1): + Non-builtin permanent scalar function. + PROCEDURE (2): + Stored procedure. + """ + ROUTINE_TYPE_UNSPECIFIED = 0 + SCALAR_FUNCTION = 1 + PROCEDURE = 2 + + class Argument(proto.Message): + r"""Input or output argument of a function or stored procedure. + + Attributes: + name (str): + The name of the argument. A return argument + of a function might not have a name. + mode (google.cloud.datacatalog_v1.types.RoutineSpec.Argument.Mode): + Specifies whether the argument is input or + output. + type_ (str): + Type of the argument. The exact value depends + on the source system and the language. + """ + class Mode(proto.Enum): + r"""The input or output mode of the argument. + + Values: + MODE_UNSPECIFIED (0): + Unspecified mode. + IN (1): + The argument is input-only. + OUT (2): + The argument is output-only. + INOUT (3): + The argument is both an input and an output. + """ + MODE_UNSPECIFIED = 0 + IN = 1 + OUT = 2 + INOUT = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + mode: 'RoutineSpec.Argument.Mode' = proto.Field( + proto.ENUM, + number=2, + enum='RoutineSpec.Argument.Mode', + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + routine_type: RoutineType = proto.Field( + proto.ENUM, + number=1, + enum=RoutineType, + ) + language: str = proto.Field( + proto.STRING, + number=2, + ) + routine_arguments: MutableSequence[Argument] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Argument, + ) + return_type: str = proto.Field( + proto.STRING, + number=4, + ) + definition_body: str = proto.Field( + proto.STRING, + number=5, + ) + bigquery_routine_spec: bigquery.BigQueryRoutineSpec = proto.Field( + proto.MESSAGE, + number=6, + oneof='system_spec', + message=bigquery.BigQueryRoutineSpec, + ) + + +class DatasetSpec(proto.Message): + r"""Specification that applies to a dataset. Valid only for entries with + the ``DATASET`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vertex_dataset_spec (google.cloud.datacatalog_v1.types.VertexDatasetSpec): + Vertex AI Dataset specific fields + + This field is a member of `oneof`_ ``system_spec``. 
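+
+    Example:
+        A read-only sketch; the linked resource below is a placeholder
+        for a Vertex AI dataset that is already synced into Data
+        Catalog::
+
+            from google.cloud import datacatalog_v1
+
+            client = datacatalog_v1.DataCatalogClient()
+            entry = client.lookup_entry(
+                request=datacatalog_v1.LookupEntryRequest(
+                    # Placeholder full resource name; substitute a real dataset.
+                    linked_resource="//aiplatform.googleapis.com/projects/my-project/locations/us-central1/datasets/1234567890",
+                )
+            )
+            print(entry.dataset_spec.vertex_dataset_spec.data_type)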
+    """
+
+    vertex_dataset_spec: 'VertexDatasetSpec' = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='system_spec',
+        message='VertexDatasetSpec',
+    )
+
+
+class SqlDatabaseSystemSpec(proto.Message):
+    r"""Specification that applies to entries that are part of the
+    ``SQL_DATABASE`` system (``user_specified_type``).
+
+    Attributes:
+        sql_engine (str):
+            SQL Database Engine.
+            ``enum SqlEngine { UNDEFINED = 0; MY_SQL = 1; POSTGRE_SQL = 2; SQL_SERVER = 3; }``
+            Engine of the enclosing database instance.
+        database_version (str):
+            Version of the database engine.
+        instance_host (str):
+            Host of the SQL database.
+            ``enum InstanceHost { UNDEFINED = 0; SELF_HOSTED = 1; CLOUD_SQL = 2; AMAZON_RDS = 3; AZURE_SQL = 4; }``
+            Host of the enclosing database instance.
+    """
+
+    sql_engine: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    database_version: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    instance_host: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class LookerSystemSpec(proto.Message):
+    r"""Specification that applies to entries that are part of the
+    ``LOOKER`` system (``user_specified_type``).
+
+    Attributes:
+        parent_instance_id (str):
+            ID of the parent Looker Instance. Empty if it does not
+            exist. Example value: ``someinstance.looker.com``
+        parent_instance_display_name (str):
+            Name of the parent Looker Instance. Empty if
+            it does not exist.
+        parent_model_id (str):
+            ID of the parent Model. Empty if it does not
+            exist.
+        parent_model_display_name (str):
+            Name of the parent Model. Empty if it does
+            not exist.
+        parent_view_id (str):
+            ID of the parent View. Empty if it does not
+            exist.
+        parent_view_display_name (str):
+            Name of the parent View. Empty if it does not
+            exist.
+    """
+
+    parent_instance_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    parent_instance_display_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    parent_model_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    parent_model_display_name: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    parent_view_id: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    parent_view_display_name: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+
+
+class CloudBigtableSystemSpec(proto.Message):
+    r"""Specification that applies to all entries that are part of the
+    ``CLOUD_BIGTABLE`` system (``user_specified_type``).
+
+    Attributes:
+        instance_display_name (str):
+            Display name of the Instance. This is
+            user-specified and different from the resource
+            name.
+    """
+
+    instance_display_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CloudBigtableInstanceSpec(proto.Message):
+    r"""Specification that applies to Instance entries that are part of the
+    ``CLOUD_BIGTABLE`` system (``user_specified_type``).
+
+    Attributes:
+        cloud_bigtable_cluster_specs (MutableSequence[google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec.CloudBigtableClusterSpec]):
+            The list of clusters for the Instance.
+    """
+
+    class CloudBigtableClusterSpec(proto.Message):
+        r"""Spec that applies to clusters of an Instance of Cloud
+        Bigtable.
+
+        Attributes:
+            display_name (str):
+                Name of the cluster.
+            location (str):
+                Location of the cluster, typically a Cloud
+                zone.
+            type_ (str):
+                Type of the resource. For a cluster, this
+                would be "CLUSTER".
+            linked_resource (str):
+                A link back to the parent resource, in this
+                case Instance.
+ """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=4, + ) + + cloud_bigtable_cluster_specs: MutableSequence[CloudBigtableClusterSpec] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=CloudBigtableClusterSpec, + ) + + +class ServiceSpec(proto.Message): + r"""Specification that applies to a Service resource. Valid only for + entries with the ``SERVICE`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_bigtable_instance_spec (google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec): + Specification that applies to Instance entries of + ``CLOUD_BIGTABLE`` system. + + This field is a member of `oneof`_ ``system_spec``. + """ + + cloud_bigtable_instance_spec: 'CloudBigtableInstanceSpec' = proto.Field( + proto.MESSAGE, + number=1, + oneof='system_spec', + message='CloudBigtableInstanceSpec', + ) + + +class VertexModelSourceInfo(proto.Message): + r"""Detail description of the source information of a Vertex + model. + + Attributes: + source_type (google.cloud.datacatalog_v1.types.VertexModelSourceInfo.ModelSourceType): + Type of the model source. + copy (bool): + If this Model is copy of another Model. If true then + [source_type][google.cloud.datacatalog.v1.VertexModelSourceInfo.source_type] + pertains to the original. + """ + class ModelSourceType(proto.Enum): + r"""Source of the model. + + Values: + MODEL_SOURCE_TYPE_UNSPECIFIED (0): + Should not be used. + AUTOML (1): + The Model is uploaded by automl training + pipeline. + CUSTOM (2): + The Model is uploaded by user or custom + training pipeline. + BQML (3): + The Model is registered and sync'ed from + BigQuery ML. + MODEL_GARDEN (4): + The Model is saved or tuned from Model + Garden. + """ + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 + AUTOML = 1 + CUSTOM = 2 + BQML = 3 + MODEL_GARDEN = 4 + + source_type: ModelSourceType = proto.Field( + proto.ENUM, + number=1, + enum=ModelSourceType, + ) + copy: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class VertexModelSpec(proto.Message): + r"""Specification for vertex model resources. + + Attributes: + version_id (str): + The version ID of the model. + version_aliases (MutableSequence[str]): + User provided version aliases so that a model + version can be referenced via alias + version_description (str): + The description of this version. + vertex_model_source_info (google.cloud.datacatalog_v1.types.VertexModelSourceInfo): + Source of a Vertex model. + container_image_uri (str): + URI of the Docker image to be used as the + custom container for serving predictions. + """ + + version_id: str = proto.Field( + proto.STRING, + number=1, + ) + version_aliases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + version_description: str = proto.Field( + proto.STRING, + number=3, + ) + vertex_model_source_info: 'VertexModelSourceInfo' = proto.Field( + proto.MESSAGE, + number=4, + message='VertexModelSourceInfo', + ) + container_image_uri: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VertexDatasetSpec(proto.Message): + r"""Specification for vertex dataset resources. + + Attributes: + data_item_count (int): + The number of DataItems in this Dataset. Only + apply for non-structured Dataset. 
+ data_type (google.cloud.datacatalog_v1.types.VertexDatasetSpec.DataType): + Type of the dataset. + """ + class DataType(proto.Enum): + r"""Type of data stored in the dataset. + + Values: + DATA_TYPE_UNSPECIFIED (0): + Should not be used. + TABLE (1): + Structured data dataset. + IMAGE (2): + Image dataset which supports + ImageClassification, ImageObjectDetection and + ImageSegmentation problems. + TEXT (3): + Document dataset which supports + TextClassification, TextExtraction and + TextSentiment problems. + VIDEO (4): + Video dataset which supports + VideoClassification, VideoObjectTracking and + VideoActionRecognition problems. + CONVERSATION (5): + Conversation dataset which supports + conversation problems. + TIME_SERIES (6): + TimeSeries dataset. + DOCUMENT (7): + Document dataset which supports + DocumentAnnotation problems. + TEXT_TO_SPEECH (8): + TextToSpeech dataset which supports + TextToSpeech problems. + TRANSLATION (9): + Translation dataset which supports + Translation problems. + STORE_VISION (10): + Store Vision dataset which is used for HITL + integration. + ENTERPRISE_KNOWLEDGE_GRAPH (11): + Enterprise Knowledge Graph dataset which is + used for HITL labeling integration. + TEXT_PROMPT (12): + Text prompt dataset which supports Large + Language Models. + """ + DATA_TYPE_UNSPECIFIED = 0 + TABLE = 1 + IMAGE = 2 + TEXT = 3 + VIDEO = 4 + CONVERSATION = 5 + TIME_SERIES = 6 + DOCUMENT = 7 + TEXT_TO_SPEECH = 8 + TRANSLATION = 9 + STORE_VISION = 10 + ENTERPRISE_KNOWLEDGE_GRAPH = 11 + TEXT_PROMPT = 12 + + data_item_count: int = proto.Field( + proto.INT64, + number=1, + ) + data_type: DataType = proto.Field( + proto.ENUM, + number=2, + enum=DataType, + ) + + +class ModelSpec(proto.Message): + r"""Specification that applies to a model. Valid only for entries with + the ``MODEL`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vertex_model_spec (google.cloud.datacatalog_v1.types.VertexModelSpec): + Specification for vertex model resources. + + This field is a member of `oneof`_ ``system_spec``. + """ + + vertex_model_spec: 'VertexModelSpec' = proto.Field( + proto.MESSAGE, + number=1, + oneof='system_spec', + message='VertexModelSpec', + ) + + +class BusinessContext(proto.Message): + r"""Business Context of the entry. + + Attributes: + entry_overview (google.cloud.datacatalog_v1.types.EntryOverview): + Entry overview fields for rich text + descriptions of entries. + contacts (google.cloud.datacatalog_v1.types.Contacts): + Contact people for the entry. + """ + + entry_overview: 'EntryOverview' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryOverview', + ) + contacts: 'Contacts' = proto.Field( + proto.MESSAGE, + number=2, + message='Contacts', + ) + + +class EntryOverview(proto.Message): + r"""Entry overview fields for rich text descriptions of entries. + + Attributes: + overview (str): + Entry overview with support for rich text. + + The overview must only contain Unicode + characters, and should be formatted using HTML. + The maximum length is 10 MiB as this value holds + HTML descriptions including encoded images. The + maximum length of the text without images is 100 + KiB. + """ + + overview: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Contacts(proto.Message): + r"""Contact people for the entry. + + Attributes: + people (MutableSequence[google.cloud.datacatalog_v1.types.Contacts.Person]): + The list of contact people for the entry. 
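+
+    Example:
+        A minimal sketch of building this message; the entry name and
+        the e-mail address are placeholders::
+
+            from google.cloud import datacatalog_v1
+
+            contacts = datacatalog_v1.Contacts(
+                people=[
+                    datacatalog_v1.Contacts.Person(
+                        designation="Data Steward",
+                        # Placeholder contact; substitute a real address.
+                        email="jane.doe@example.com",
+                    ),
+                ],
+            )
+            client = datacatalog_v1.DataCatalogClient()
+            client.modify_entry_contacts(
+                request=datacatalog_v1.ModifyEntryContactsRequest(
+                    # Placeholder entry name; substitute a real entry.
+                    name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",
+                    contacts=contacts,
+                )
+            )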
+ """ + + class Person(proto.Message): + r"""A contact person for the entry. + + Attributes: + designation (str): + Designation of the person, for example, Data + Steward. + email (str): + Email of the person in the format of ``john.doe@xyz``, + ````, or ``John Doe``. + """ + + designation: str = proto.Field( + proto.STRING, + number=1, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + + people: MutableSequence[Person] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Person, + ) + + +class EntryGroup(proto.Message): + r"""Entry group metadata. + + An ``EntryGroup`` resource represents a logical grouping of zero or + more Data Catalog [Entry][google.cloud.datacatalog.v1.Entry] + resources. + + Attributes: + name (str): + The resource name of the entry group in URL + format. + Note: The entry group itself and its child + resources might not be stored in the location + specified in its name. + display_name (str): + A short name to identify the entry group, for + example, "analytics data - jan 2011". Default + value is an empty string. + description (str): + Entry group description. Can consist of + several sentences or paragraphs that describe + the entry group contents. Default value is an + empty string. + data_catalog_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Output only. Timestamps of the entry group. + Default value is empty. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + data_catalog_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=4, + message=timestamps.SystemTimestamps, + ) + + +class CreateTagTemplateRequest(proto.Message): + r"""Request message for + [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. + + Attributes: + parent (str): + Required. The name of the project and the template location + `region `__. + tag_template_id (str): + Required. The ID of the tag template to create. + + The ID must contain only lowercase letters (a-z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum size is 64 bytes when encoded in + UTF-8. + tag_template (google.cloud.datacatalog_v1.types.TagTemplate): + Required. The tag template to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_id: str = proto.Field( + proto.STRING, + number=3, + ) + tag_template: gcd_tags.TagTemplate = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.TagTemplate, + ) + + +class GetTagTemplateRequest(proto.Message): + r"""Request message for + [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. + + Attributes: + name (str): + Required. The name of the tag template to + get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTagTemplateRequest(proto.Message): + r"""Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. + + Attributes: + tag_template (google.cloud.datacatalog_v1.types.TagTemplate): + Required. The template to update. The ``name`` field must be + set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag template. + Currently, only ``display_name`` and + ``is_publicly_readable`` can be overwritten. + + If this parameter is absent or empty, all modifiable fields + are overwritten. 
If such fields are non-required and omitted + in the request body, their values are emptied. + + Note: Updating the ``is_publicly_readable`` field may + require up to 12 hours to take effect in search results. + """ + + tag_template: gcd_tags.TagTemplate = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_tags.TagTemplate, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTagTemplateRequest(proto.Message): + r"""Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. + + Attributes: + name (str): + Required. The name of the tag template to + delete. + force (bool): + Required. If true, deletes all tags that use this template. + + Currently, ``true`` is the only supported value. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class CreateTagRequest(proto.Message): + r"""Request message for + [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. + + Attributes: + parent (str): + Required. The name of the resource to attach + this tag to. + Tags can be attached to entries or entry groups. + An entry can have up to 1000 attached tags. + + Note: The tag and its child resources might not + be stored in the location specified in its name. + tag (google.cloud.datacatalog_v1.types.Tag): + Required. The tag to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag: gcd_tags.Tag = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.Tag, + ) + + +class UpdateTagRequest(proto.Message): + r"""Request message for + [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. + + Attributes: + tag (google.cloud.datacatalog_v1.types.Tag): + Required. The updated tag. The "name" field + must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the name + ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. + """ + + tag: gcd_tags.Tag = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_tags.Tag, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTagRequest(proto.Message): + r"""Request message for + [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. + + Attributes: + name (str): + Required. The name of the tag to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTagTemplateFieldRequest(proto.Message): + r"""Request message for + [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. + + Attributes: + parent (str): + Required. The name of the project and the template location + `region `__. + tag_template_field_id (str): + Required. The ID of the tag template field to create. + + Note: Adding a required field to an existing template is + *not* allowed. + + Field IDs can contain letters (both uppercase and + lowercase), numbers (0-9), underscores (_) and dashes (-). + Field IDs must be at least 1 character long and at most 128 + characters long. Field IDs must also be unique within their + template. + tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): + Required. 
The tag template field to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_field_id: str = proto.Field( + proto.STRING, + number=2, + ) + tag_template_field: gcd_tags.TagTemplateField = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_tags.TagTemplateField, + ) + + +class UpdateTagTemplateFieldRequest(proto.Message): + r"""Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template field. + tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): + Required. The template to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Names of fields whose values to overwrite on an + individual field of a tag template. The following fields are + modifiable: + + - ``display_name`` + - ``type.enum_type`` + - ``is_required`` + + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied with one + exception: when updating an enum type, the provided values + are merged with the existing values. Therefore, enum values + can only be added, existing enum values cannot be deleted or + renamed. + + Additionally, updating a template field from optional to + required is *not* allowed. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_field: gcd_tags.TagTemplateField = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.TagTemplateField, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class RenameTagTemplateFieldRequest(proto.Message): + r"""Request message for + [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template field. + new_tag_template_field_id (str): + Required. The new ID of this tag template field. For + example, ``my_new_field``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_tag_template_field_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RenameTagTemplateFieldEnumValueRequest(proto.Message): + r"""Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + + Attributes: + name (str): + Required. The name of the enum field value. + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_enum_value_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteTagTemplateFieldRequest(proto.Message): + r"""Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template field + to delete. + force (bool): + Required. If true, deletes this field from any tags that use + it. + + Currently, ``true`` is the only supported value. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListTagsRequest(proto.Message): + r"""Request message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + + Attributes: + parent (str): + Required. 
The name of the Data Catalog resource to list the + tags of. + + The resource can be an + [Entry][google.cloud.datacatalog.v1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] + (without ``/entries/{entries}`` at the end). + page_size (int): + The maximum number of tags to return. Default + is 10. Maximum limit is 1000. + page_token (str): + Pagination token that specifies the next page + to return. If empty, the first page is returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTagsResponse(proto.Message): + r"""Response message for + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. + + Attributes: + tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): + [Tag][google.cloud.datacatalog.v1.Tag] details. + next_page_token (str): + Pagination token of the next results page. + Empty if there are no more items in results. + """ + + @property + def raw_page(self): + return self + + tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_tags.Tag, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ReconcileTagsRequest(proto.Message): + r"""Request message for + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + + Attributes: + parent (str): + Required. Name of [Entry][google.cloud.datacatalog.v1.Entry] + to be tagged. + tag_template (str): + Required. The name of the tag template, which + is used for reconciliation. + force_delete_missing (bool): + If set to ``true``, deletes entry tags related to a tag + template not listed in the tags source from an entry. If set + to ``false``, unlisted tags are retained. + tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): + A list of tags to apply to an entry. A tag can specify a tag + template, which must be the template specified in the + ``ReconcileTagsRequest``. The sole entry and each of its + columns must be mentioned at most once. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template: str = proto.Field( + proto.STRING, + number=2, + ) + force_delete_missing: bool = proto.Field( + proto.BOOL, + number=3, + ) + tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=gcd_tags.Tag, + ) + + +class ReconcileTagsResponse(proto.Message): + r"""[Long-running operation][google.longrunning.Operation] response + message returned by + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + + Attributes: + created_tags_count (int): + Number of tags created in the request. + updated_tags_count (int): + Number of tags updated in the request. + deleted_tags_count (int): + Number of tags deleted in the request. + """ + + created_tags_count: int = proto.Field( + proto.INT64, + number=1, + ) + updated_tags_count: int = proto.Field( + proto.INT64, + number=2, + ) + deleted_tags_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +class ReconcileTagsMetadata(proto.Message): + r"""[Long-running operation][google.longrunning.Operation] metadata + message returned by the + [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. + + Attributes: + state (google.cloud.datacatalog_v1.types.ReconcileTagsMetadata.ReconciliationState): + State of the reconciliation operation. 
+ errors (MutableMapping[str, google.rpc.status_pb2.Status]): + Maps the name of each tagged column (or empty string for a + sole entry) to tagging operation + [status][google.rpc.Status]. + """ + class ReconciliationState(proto.Enum): + r"""Enum holding possible states of the reconciliation operation. + + Values: + RECONCILIATION_STATE_UNSPECIFIED (0): + Default value. This value is unused. + RECONCILIATION_QUEUED (1): + The reconciliation has been queued and awaits + for execution. + RECONCILIATION_IN_PROGRESS (2): + The reconciliation is in progress. + RECONCILIATION_DONE (3): + The reconciliation has been finished. + """ + RECONCILIATION_STATE_UNSPECIFIED = 0 + RECONCILIATION_QUEUED = 1 + RECONCILIATION_IN_PROGRESS = 2 + RECONCILIATION_DONE = 3 + + state: ReconciliationState = proto.Field( + proto.ENUM, + number=1, + enum=ReconciliationState, + ) + errors: MutableMapping[str, status_pb2.Status] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class ListEntriesRequest(proto.Message): + r"""Request message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + + Attributes: + parent (str): + Required. The name of the entry group that + contains the entries to list. + Can be provided in URL format. + page_size (int): + The maximum number of items to return. Default is 10. + Maximum limit is 1000. Throws an invalid argument if + ``page_size`` is more than 1000. + page_token (str): + Pagination token that specifies the next page + to return. If empty, the first page is returned. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return for each entry. If empty or omitted, + all fields are returned. + + For example, to return a list of entries with only the + ``name`` field, set ``read_mask`` to only one path with the + ``name`` value. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class ListEntriesResponse(proto.Message): + r"""Response message for + [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. + + Attributes: + entries (MutableSequence[google.cloud.datacatalog_v1.types.Entry]): + Entry details. + next_page_token (str): + Pagination token of the next results page. + Empty if there are no more items in results. + """ + + @property + def raw_page(self): + return self + + entries: MutableSequence['Entry'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entry', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class StarEntryRequest(proto.Message): + r"""Request message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + + Attributes: + name (str): + Required. The name of the entry to mark as + starred. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class StarEntryResponse(proto.Message): + r"""Response message for + [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. + Empty for now + + """ + + +class UnstarEntryRequest(proto.Message): + r"""Request message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + + Attributes: + name (str): + Required. The name of the entry to mark as **not** starred. 
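+
+    Example (a minimal sketch, assuming a configured ``DataCatalogClient``
+    and a placeholder entry name)::
+
+        from google.cloud import datacatalog_v1
+
+        client = datacatalog_v1.DataCatalogClient()
+        request = datacatalog_v1.UnstarEntryRequest(
+            name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",
+        )
+        # Removes the star; the response message is currently empty.
+        client.unstar_entry(request=request)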
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UnstarEntryResponse(proto.Message): + r"""Response message for + [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. + Empty for now + + """ + + +class ImportEntriesRequest(proto.Message): + r"""Request message for + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] + method. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Target entry group for ingested + entries. + gcs_bucket_path (str): + Path to a Cloud Storage bucket that contains + a dump ready for ingestion. + + This field is a member of `oneof`_ ``source``. + job_id (str): + Optional. (Optional) Dataplex task job id, if + specified will be used as part of ImportEntries + LRO ID + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + gcs_bucket_path: str = proto.Field( + proto.STRING, + number=2, + oneof='source', + ) + job_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ImportEntriesResponse(proto.Message): + r"""Response message for [long-running + operation][google.longrunning.Operation] returned by the + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + upserted_entries_count (int): + Cumulative number of entries created and + entries updated as a result of import operation. + + This field is a member of `oneof`_ ``_upserted_entries_count``. + deleted_entries_count (int): + Number of entries deleted as a result of + import operation. + + This field is a member of `oneof`_ ``_deleted_entries_count``. + """ + + upserted_entries_count: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + deleted_entries_count: int = proto.Field( + proto.INT64, + number=6, + optional=True, + ) + + +class ImportEntriesMetadata(proto.Message): + r"""Metadata message for [long-running + operation][google.longrunning.Operation] returned by the + [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. + + Attributes: + state (google.cloud.datacatalog_v1.types.ImportEntriesMetadata.ImportState): + State of the import operation. + errors (MutableSequence[google.rpc.status_pb2.Status]): + Partial errors that are encountered during + the ImportEntries operation. There is no + guarantee that all the encountered errors are + reported. However, if no errors are reported, it + means that no errors were encountered. + """ + class ImportState(proto.Enum): + r"""Enum holding possible states of the import operation. + + Values: + IMPORT_STATE_UNSPECIFIED (0): + Default value. This value is unused. + IMPORT_QUEUED (1): + The dump with entries has been queued for + import. + IMPORT_IN_PROGRESS (2): + The import of entries is in progress. + IMPORT_DONE (3): + The import of entries has been finished. + IMPORT_OBSOLETE (4): + The import of entries has been abandoned in + favor of a newer request. 
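+
+        Example (a minimal sketch of starting an import and reading this
+        state from the long-running operation; assumes a configured
+        ``DataCatalogClient``, a placeholder entry group, and a Cloud
+        Storage dump prepared elsewhere)::
+
+            from google.cloud import datacatalog_v1
+
+            client = datacatalog_v1.DataCatalogClient()
+            operation = client.import_entries(
+                request=datacatalog_v1.ImportEntriesRequest(
+                    parent="projects/my-project/locations/us-central1/entryGroups/my_group",
+                    gcs_bucket_path="gs://my-bucket/entries-dump",
+                )
+            )
+            # Blocks until the operation reaches a terminal state.
+            response = operation.result()
+            # Usually IMPORT_DONE once the result is available.
+            print(operation.metadata.state)
+            print(response.upserted_entries_count)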
+ """ + IMPORT_STATE_UNSPECIFIED = 0 + IMPORT_QUEUED = 1 + IMPORT_IN_PROGRESS = 2 + IMPORT_DONE = 3 + IMPORT_OBSOLETE = 4 + + state: ImportState = proto.Field( + proto.ENUM, + number=1, + enum=ImportState, + ) + errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + + +class ModifyEntryOverviewRequest(proto.Message): + r"""Request message for + [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. + + Attributes: + name (str): + Required. The full resource name of the + entry. + entry_overview (google.cloud.datacatalog_v1.types.EntryOverview): + Required. The new value for the Entry + Overview. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_overview: 'EntryOverview' = proto.Field( + proto.MESSAGE, + number=2, + message='EntryOverview', + ) + + +class ModifyEntryContactsRequest(proto.Message): + r"""Request message for + [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. + + Attributes: + name (str): + Required. The full resource name of the + entry. + contacts (google.cloud.datacatalog_v1.types.Contacts): + Required. The new value for the Contacts. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + contacts: 'Contacts' = proto.Field( + proto.MESSAGE, + number=2, + message='Contacts', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py new file mode 100644 index 000000000000..6d19a71e6562 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py @@ -0,0 +1,170 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import physical_schema + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'DataplexSpec', + 'DataplexFilesetSpec', + 'DataplexTableSpec', + 'DataplexExternalTable', + }, +) + + +class DataplexSpec(proto.Message): + r"""Common Dataplex fields. + + Attributes: + asset (str): + Fully qualified resource name of an asset in + Dataplex, to which the underlying data source + (Cloud Storage bucket or BigQuery dataset) of + the entity is attached. + data_format (google.cloud.datacatalog_v1.types.PhysicalSchema): + Format of the data. + compression_format (str): + Compression format of the data, e.g., zip, + gzip etc. + project_id (str): + Project ID of the underlying Cloud Storage or + BigQuery data. Note that this may not be the + same project as the correspondingly Dataplex + lake / zone / asset. 
+ """ + + asset: str = proto.Field( + proto.STRING, + number=1, + ) + data_format: physical_schema.PhysicalSchema = proto.Field( + proto.MESSAGE, + number=2, + message=physical_schema.PhysicalSchema, + ) + compression_format: str = proto.Field( + proto.STRING, + number=3, + ) + project_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DataplexFilesetSpec(proto.Message): + r"""Entry specyfication for a Dataplex fileset. + + Attributes: + dataplex_spec (google.cloud.datacatalog_v1.types.DataplexSpec): + Common Dataplex fields. + """ + + dataplex_spec: 'DataplexSpec' = proto.Field( + proto.MESSAGE, + number=1, + message='DataplexSpec', + ) + + +class DataplexTableSpec(proto.Message): + r"""Entry specification for a Dataplex table. + + Attributes: + external_tables (MutableSequence[google.cloud.datacatalog_v1.types.DataplexExternalTable]): + List of external tables registered by + Dataplex in other systems based on the same + underlying data. + + External tables allow to query this data in + those systems. + dataplex_spec (google.cloud.datacatalog_v1.types.DataplexSpec): + Common Dataplex fields. + user_managed (bool): + Indicates if the table schema is managed by + the user or not. + """ + + external_tables: MutableSequence['DataplexExternalTable'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DataplexExternalTable', + ) + dataplex_spec: 'DataplexSpec' = proto.Field( + proto.MESSAGE, + number=2, + message='DataplexSpec', + ) + user_managed: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DataplexExternalTable(proto.Message): + r"""External table registered by Dataplex. + Dataplex publishes data discovered from an asset into multiple + other systems (BigQuery, DPMS) in form of tables. We call them + "external tables". External tables are also synced into the Data + Catalog. + This message contains pointers to + those external tables (fully qualified name, resource name et + cetera) within the Data Catalog. + + Attributes: + system (google.cloud.datacatalog_v1.types.IntegratedSystem): + Service in which the external table is + registered. + fully_qualified_name (str): + Fully qualified name (FQN) of the external + table. + google_cloud_resource (str): + Google Cloud resource name of the external + table. + data_catalog_entry (str): + Name of the Data Catalog entry representing + the external table. + """ + + system: common.IntegratedSystem = proto.Field( + proto.ENUM, + number=1, + enum=common.IntegratedSystem, + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=28, + ) + google_cloud_resource: str = proto.Field( + proto.STRING, + number=3, + ) + data_catalog_entry: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py new file mode 100644 index 000000000000..2f859e874e94 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import tags + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'TaggedEntry', + 'DumpItem', + }, +) + + +class TaggedEntry(proto.Message): + r"""Wrapper containing Entry and information about Tags + that should and should not be attached to it. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + v1_entry (google.cloud.datacatalog_v1.types.Entry): + Non-encrypted Data Catalog v1 Entry. + + This field is a member of `oneof`_ ``entry``. + present_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): + Optional. Tags that should be ingested into + the Data Catalog. Caller should populate + template name, column and fields. + absent_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): + Optional. Tags that should be deleted from + the Data Catalog. Caller should populate + template name and column only. + """ + + v1_entry: datacatalog.Entry = proto.Field( + proto.MESSAGE, + number=1, + oneof='entry', + message=datacatalog.Entry, + ) + present_tags: MutableSequence[tags.Tag] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=tags.Tag, + ) + absent_tags: MutableSequence[tags.Tag] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=tags.Tag, + ) + + +class DumpItem(proto.Message): + r"""Wrapper for any item that can be contained in the dump. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tagged_entry (google.cloud.datacatalog_v1.types.TaggedEntry): + Entry and its tags. + + This field is a member of `oneof`_ ``item``. + """ + + tagged_entry: 'TaggedEntry' = proto.Field( + proto.MESSAGE, + number=1, + oneof='item', + message='TaggedEntry', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py new file mode 100644 index 000000000000..e6cdd35f90f3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import timestamps + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'GcsFilesetSpec', + 'GcsFileSpec', + }, +) + + +class GcsFilesetSpec(proto.Message): + r"""Describes a Cloud Storage fileset entry. + + Attributes: + file_patterns (MutableSequence[str]): + Required. Patterns to identify a set of files in Google + Cloud Storage. + + For more information, see [Wildcard Names] + (https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames). + + Note: Currently, bucket wildcards are not supported. + + Examples of valid ``file_patterns``: + + - ``gs://bucket_name/dir/*``: matches all files in + ``bucket_name/dir`` directory + - ``gs://bucket_name/dir/**``: matches all files in + ``bucket_name/dir`` and all subdirectories + - ``gs://bucket_name/file*``: matches files prefixed by + ``file`` in ``bucket_name`` + - ``gs://bucket_name/??.txt``: matches files with two + characters followed by ``.txt`` in ``bucket_name`` + - ``gs://bucket_name/[aeiou].txt``: matches files that + contain a single vowel character followed by ``.txt`` in + ``bucket_name`` + - ``gs://bucket_name/[a-m].txt``: matches files that + contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` + in ``bucket_name`` + - ``gs://bucket_name/a/*/b``: matches all files in + ``bucket_name`` that match the ``a/*/b`` pattern, such as + ``a/c/b``, ``a/d/b`` + - ``gs://another_bucket/a.txt``: matches + ``gs://another_bucket/a.txt`` + + You can combine wildcards to match complex sets of files, + for example: + + ``gs://bucket_name/[a-m]??.j*g`` + sample_gcs_file_specs (MutableSequence[google.cloud.datacatalog_v1.types.GcsFileSpec]): + Output only. Sample files contained in this + fileset, not all files contained in this fileset + are represented here. + """ + + file_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + sample_gcs_file_specs: MutableSequence['GcsFileSpec'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='GcsFileSpec', + ) + + +class GcsFileSpec(proto.Message): + r"""Specification of a single file in Cloud Storage. + + Attributes: + file_path (str): + Required. Full file path. Example: + ``gs://bucket_name/a/b.txt``. + gcs_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Output only. Creation, modification, and + expiration timestamps of a Cloud Storage file. + size_bytes (int): + Output only. File size in bytes. + """ + + file_path: str = proto.Field( + proto.STRING, + number=1, + ) + gcs_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=2, + message=timestamps.SystemTimestamps, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py new file mode 100644 index 000000000000..82d77736ed3b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'PhysicalSchema', + }, +) + + +class PhysicalSchema(proto.Message): + r"""Native schema used by a resource represented as an entry. + Used by query engines for deserializing and parsing source data. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + avro (google.cloud.datacatalog_v1.types.PhysicalSchema.AvroSchema): + Schema in Avro JSON format. + + This field is a member of `oneof`_ ``schema``. + thrift (google.cloud.datacatalog_v1.types.PhysicalSchema.ThriftSchema): + Schema in Thrift format. + + This field is a member of `oneof`_ ``schema``. + protobuf (google.cloud.datacatalog_v1.types.PhysicalSchema.ProtobufSchema): + Schema in protocol buffer format. + + This field is a member of `oneof`_ ``schema``. + parquet (google.cloud.datacatalog_v1.types.PhysicalSchema.ParquetSchema): + Marks a Parquet-encoded data source. + + This field is a member of `oneof`_ ``schema``. + orc (google.cloud.datacatalog_v1.types.PhysicalSchema.OrcSchema): + Marks an ORC-encoded data source. + + This field is a member of `oneof`_ ``schema``. + csv (google.cloud.datacatalog_v1.types.PhysicalSchema.CsvSchema): + Marks a CSV-encoded data source. + + This field is a member of `oneof`_ ``schema``. + """ + + class AvroSchema(proto.Message): + r"""Schema in Avro JSON format. + + Attributes: + text (str): + JSON source of the Avro schema. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + + class ThriftSchema(proto.Message): + r"""Schema in Thrift format. + + Attributes: + text (str): + Thrift IDL source of the schema. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + + class ProtobufSchema(proto.Message): + r"""Schema in protocol buffer format. + + Attributes: + text (str): + Protocol buffer source of the schema. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + + class ParquetSchema(proto.Message): + r"""Marks a Parquet-encoded data source. + """ + + class OrcSchema(proto.Message): + r"""Marks an ORC-encoded data source. + """ + + class CsvSchema(proto.Message): + r"""Marks a CSV-encoded data source. 
+ """ + + avro: AvroSchema = proto.Field( + proto.MESSAGE, + number=1, + oneof='schema', + message=AvroSchema, + ) + thrift: ThriftSchema = proto.Field( + proto.MESSAGE, + number=2, + oneof='schema', + message=ThriftSchema, + ) + protobuf: ProtobufSchema = proto.Field( + proto.MESSAGE, + number=3, + oneof='schema', + message=ProtobufSchema, + ) + parquet: ParquetSchema = proto.Field( + proto.MESSAGE, + number=4, + oneof='schema', + message=ParquetSchema, + ) + orc: OrcSchema = proto.Field( + proto.MESSAGE, + number=5, + oneof='schema', + message=OrcSchema, + ) + csv: CsvSchema = proto.Field( + proto.MESSAGE, + number=6, + oneof='schema', + message=CsvSchema, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py new file mode 100644 index 000000000000..4624c46cb4b5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -0,0 +1,551 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import timestamps +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'Taxonomy', + 'PolicyTag', + 'CreateTaxonomyRequest', + 'DeleteTaxonomyRequest', + 'UpdateTaxonomyRequest', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'GetTaxonomyRequest', + 'CreatePolicyTagRequest', + 'DeletePolicyTagRequest', + 'UpdatePolicyTagRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'GetPolicyTagRequest', + }, +) + + +class Taxonomy(proto.Message): + r"""A taxonomy is a collection of hierarchical policy tags that classify + data along a common axis. + + For example, a "data sensitivity" taxonomy might contain the + following policy tags: + + :: + + + PII + + Account number + + Age + + SSN + + Zipcode + + Financials + + Revenue + + A "data origin" taxonomy might contain the following policy tags: + + :: + + + User data + + Employee data + + Partner data + + Public data + + Attributes: + name (str): + Output only. Resource name of this taxonomy + in URL format. + Note: Policy tag manager generates unique + taxonomy IDs. + display_name (str): + Required. User-defined name of this taxonomy. + + The name can't start or end with spaces, must + contain only Unicode letters, numbers, + underscores, dashes, and spaces, and be at most + 200 bytes long when encoded in UTF-8. + + The taxonomy display name must be unique within + an organization. + description (str): + Optional. Description of this taxonomy. If + not set, defaults to empty. 
+ The description must contain only Unicode + characters, tabs, newlines, carriage returns, + and page breaks, and be at most 2000 bytes long + when encoded in UTF-8. + policy_tag_count (int): + Output only. Number of policy tags in this + taxonomy. + taxonomy_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): + Output only. Creation and modification + timestamps of this taxonomy. + activated_policy_types (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + Optional. A list of policy types that are + activated for this taxonomy. If not set, + defaults to an empty list. + service (google.cloud.datacatalog_v1.types.Taxonomy.Service): + Output only. Identity of the service which + owns the Taxonomy. This field is only populated + when the taxonomy is created by a Google Cloud + service. Currently only 'DATAPLEX' is supported. + """ + class PolicyType(proto.Enum): + r"""Defines policy types where the policy tags can be used for. + + Values: + POLICY_TYPE_UNSPECIFIED (0): + Unspecified policy type. + FINE_GRAINED_ACCESS_CONTROL (1): + Fine-grained access control policy that + enables access control on tagged sub-resources. + """ + POLICY_TYPE_UNSPECIFIED = 0 + FINE_GRAINED_ACCESS_CONTROL = 1 + + class Service(proto.Message): + r"""The source system of the Taxonomy. + + Attributes: + name (google.cloud.datacatalog_v1.types.ManagingSystem): + The Google Cloud service name. + identity (str): + The service agent for the service. + """ + + name: common.ManagingSystem = proto.Field( + proto.ENUM, + number=1, + enum=common.ManagingSystem, + ) + identity: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + policy_tag_count: int = proto.Field( + proto.INT32, + number=4, + ) + taxonomy_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=5, + message=timestamps.SystemTimestamps, + ) + activated_policy_types: MutableSequence[PolicyType] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=PolicyType, + ) + service: Service = proto.Field( + proto.MESSAGE, + number=7, + message=Service, + ) + + +class PolicyTag(proto.Message): + r"""Denotes one policy tag in a taxonomy, for example, SSN. + + Policy tags can be defined in a hierarchy. For example: + + :: + + + Geolocation + + LatLong + + City + + ZipCode + + Where the "Geolocation" policy tag contains three children. + + Attributes: + name (str): + Output only. Resource name of this policy tag + in the URL format. + The policy tag manager generates unique taxonomy + IDs and policy tag IDs. + display_name (str): + Required. User-defined name of this policy + tag. + The name can't start or end with spaces and must + be unique within the parent taxonomy, contain + only Unicode letters, numbers, underscores, + dashes and spaces, and be at most 200 bytes long + when encoded in UTF-8. + description (str): + Description of this policy tag. If not set, + defaults to empty. + The description must contain only Unicode + characters, tabs, newlines, carriage returns and + page breaks, and be at most 2000 bytes long when + encoded in UTF-8. + parent_policy_tag (str): + Resource name of this policy tag's parent + policy tag. If empty, this is a top level tag. + If not set, defaults to an empty string. 
+ + For example, for the "LatLong" policy tag in the + example above, this field contains the resource + name of the "Geolocation" policy tag, and, for + "Geolocation", this field is empty. + child_policy_tags (MutableSequence[str]): + Output only. Resource names of child policy + tags of this policy tag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + parent_policy_tag: str = proto.Field( + proto.STRING, + number=4, + ) + child_policy_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class CreateTaxonomyRequest(proto.Message): + r"""Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. + + Attributes: + parent (str): + Required. Resource name of the project that + the taxonomy will belong to. + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomy: 'Taxonomy' = proto.Field( + proto.MESSAGE, + number=2, + message='Taxonomy', + ) + + +class DeleteTaxonomyRequest(proto.Message): + r"""Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the taxonomy to + delete. + Note: All policy tags in this taxonomy are also + deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTaxonomyRequest(proto.Message): + r"""Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. + + Attributes: + taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): + The taxonomy to update. You can update only + its description, display name, and activated + policy types. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Specifies fields to update. If not set, defaults to all + fields you can update. + + For more information, see [FieldMask] + (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). + """ + + taxonomy: 'Taxonomy' = proto.Field( + proto.MESSAGE, + number=1, + message='Taxonomy', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListTaxonomiesRequest(proto.Message): + r"""Request message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project to + list the taxonomies of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000 inclusively. If + not set, defaults to 50. + page_token (str): + The pagination token of the next results + page. If not set, the first page is returned. + + The token is returned in the response to a + previous list request. + filter (str): + Supported field for filter is 'service' and + value is 'dataplex'. Eg: service=dataplex. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTaxonomiesResponse(proto.Message): + r"""Response message for + [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. 
+ + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Taxonomies that the project contains. + next_page_token (str): + Pagination token of the next results page. + Empty if there are no more results in the list. + """ + + @property + def raw_page(self): + return self + + taxonomies: MutableSequence['Taxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Taxonomy', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaxonomyRequest(proto.Message): + r"""Request message for + [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the taxonomy to + get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreatePolicyTagRequest(proto.Message): + r"""Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy that + the policy tag will belong to. + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + policy_tag: 'PolicyTag' = proto.Field( + proto.MESSAGE, + number=2, + message='PolicyTag', + ) + + +class DeletePolicyTagRequest(proto.Message): + r"""Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. + + Attributes: + name (str): + Required. Resource name of the policy tag to + delete. + Note: All of its descendant policy tags are also + deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdatePolicyTagRequest(proto.Message): + r"""Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. + + Attributes: + policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): + The policy tag to update. You can update only + its description, display name, and parent policy + tag fields. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Specifies the fields to update. + + You can update only display name, description, and parent + policy tag. If not set, defaults to all updatable fields. + For more information, see [FieldMask] + (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). + """ + + policy_tag: 'PolicyTag' = proto.Field( + proto.MESSAGE, + number=1, + message='PolicyTag', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListPolicyTagsRequest(proto.Message): + r"""Request message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy to + list the policy tags of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000 inclusively. + If not set, defaults to 50. + page_token (str): + The pagination token of the next results + page. If not set, returns the first page. + + The token is returned in the response to a + previous list request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListPolicyTagsResponse(proto.Message): + r"""Response message for + [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. 
+ + Attributes: + policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.PolicyTag]): + The policy tags that belong to the taxonomy. + next_page_token (str): + Pagination token of the next results page. + Empty if there are no more results in the list. + """ + + @property + def raw_page(self): + return self + + policy_tags: MutableSequence['PolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PolicyTag', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPolicyTagRequest(proto.Message): + r"""Request message for + [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. + + Attributes: + name (str): + Required. Resource name of the policy tag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py new file mode 100644 index 000000000000..1ed5a73de618 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py @@ -0,0 +1,288 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import policytagmanager + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'SerializedTaxonomy', + 'SerializedPolicyTag', + 'ReplaceTaxonomyRequest', + 'ImportTaxonomiesRequest', + 'InlineSource', + 'CrossRegionalSource', + 'ImportTaxonomiesResponse', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + }, +) + + +class SerializedTaxonomy(proto.Message): + r"""A nested protocol buffer that represents a taxonomy and the + hierarchy of its policy tags. Used for taxonomy replacement, + import, and export. + + Attributes: + display_name (str): + Required. Display name of the taxonomy. At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized taxonomy. At + most 2000 bytes when encoded in UTF-8. If not + set, defaults to an empty description. + policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Top level policy tags associated with the + taxonomy, if any. + activated_policy_types (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): + A list of policy types that are activated per + taxonomy. 
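+
+    Example (a minimal sketch of building a serialized taxonomy and
+    importing it inline; assumes the generated
+    ``PolicyTagManagerSerializationClient`` and a placeholder parent)::
+
+        from google.cloud import datacatalog_v1
+
+        taxonomy = datacatalog_v1.SerializedTaxonomy(
+            display_name="Data sensitivity",
+            policy_tags=[
+                datacatalog_v1.SerializedPolicyTag(
+                    display_name="PII",
+                    child_policy_tags=[
+                        datacatalog_v1.SerializedPolicyTag(display_name="SSN"),
+                    ],
+                ),
+            ],
+        )
+        client = datacatalog_v1.PolicyTagManagerSerializationClient()
+        response = client.import_taxonomies(
+            request=datacatalog_v1.ImportTaxonomiesRequest(
+                parent="projects/my-project/locations/us-central1",
+                inline_source=datacatalog_v1.InlineSource(taxonomies=[taxonomy]),
+            )
+        )
+        # response.taxonomies holds the created taxonomies.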
+ """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='SerializedPolicyTag', + ) + activated_policy_types: MutableSequence[policytagmanager.Taxonomy.PolicyType] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=policytagmanager.Taxonomy.PolicyType, + ) + + +class SerializedPolicyTag(proto.Message): + r"""A nested protocol buffer that represents a policy tag and all + its descendants. + + Attributes: + policy_tag (str): + Resource name of the policy tag. + + This field is ignored when calling ``ImportTaxonomies``. + display_name (str): + Required. Display name of the policy tag. At + most 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized policy tag. At + most 2000 bytes when encoded in UTF-8. If not + set, defaults to an empty description. + child_policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): + Children of the policy tag, if any. + """ + + policy_tag: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + child_policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='SerializedPolicyTag', + ) + + +class ReplaceTaxonomyRequest(proto.Message): + r"""Request message for + [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the taxonomy to + update. + serialized_taxonomy (google.cloud.datacatalog_v1.types.SerializedTaxonomy): + Required. Taxonomy to update along with its + child policy tags. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + serialized_taxonomy: 'SerializedTaxonomy' = proto.Field( + proto.MESSAGE, + number=2, + message='SerializedTaxonomy', + ) + + +class ImportTaxonomiesRequest(proto.Message): + r"""Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Resource name of project that the + imported taxonomies will belong to. + inline_source (google.cloud.datacatalog_v1.types.InlineSource): + Inline source taxonomy to import. + + This field is a member of `oneof`_ ``source``. + cross_regional_source (google.cloud.datacatalog_v1.types.CrossRegionalSource): + Cross-regional source taxonomy to import. + + This field is a member of `oneof`_ ``source``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inline_source: 'InlineSource' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='InlineSource', + ) + cross_regional_source: 'CrossRegionalSource' = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message='CrossRegionalSource', + ) + + +class InlineSource(proto.Message): + r"""Inline source containing taxonomies to import. 
+ + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + Required. Taxonomies to import. + """ + + taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SerializedTaxonomy', + ) + + +class CrossRegionalSource(proto.Message): + r"""Cross-regional source used to import an existing taxonomy + into a different region. + + Attributes: + taxonomy (str): + Required. The resource name of the source + taxonomy to import. + """ + + taxonomy: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ImportTaxonomiesResponse(proto.Message): + r"""Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy]): + Imported taxonomies. + """ + + taxonomies: MutableSequence[policytagmanager.Taxonomy] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=policytagmanager.Taxonomy, + ) + + +class ExportTaxonomiesRequest(proto.Message): + r"""Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Resource name of the project that + the exported taxonomies belong to. + taxonomies (MutableSequence[str]): + Required. Resource names of the taxonomies to + export. + serialized_taxonomies (bool): + Serialized export taxonomies that contain all + the policy tags as nested protocol buffers. + + This field is a member of `oneof`_ ``destination``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + serialized_taxonomies: bool = proto.Field( + proto.BOOL, + number=3, + oneof='destination', + ) + + +class ExportTaxonomiesResponse(proto.Message): + r"""Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): + List of taxonomies and policy tags as nested + protocol buffers. + """ + + taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SerializedTaxonomy', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py new file mode 100644 index 000000000000..3a82e77b213e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'Schema', + 'ColumnSchema', + }, +) + + +class Schema(proto.Message): + r"""Represents a schema, for example, a BigQuery, GoogleSQL, or + Avro schema. + + Attributes: + columns (MutableSequence[google.cloud.datacatalog_v1.types.ColumnSchema]): + The unified GoogleSQL-like schema of columns. + + The overall maximum number of columns and nested + columns is 10,000. The maximum nested depth is + 15 levels. + """ + + columns: MutableSequence['ColumnSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ColumnSchema', + ) + + +class ColumnSchema(proto.Message): + r"""A column within a schema. Columns can be nested inside + other columns. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + column (str): + Required. Name of the column. + + Must be a UTF-8 string without dots (.). + The maximum size is 64 bytes. + type_ (str): + Required. Type of the column. + + Must be a UTF-8 string with the maximum size of + 128 bytes. + description (str): + Optional. Description of the column. Default + value is an empty string. + The description must be a UTF-8 string with the + maximum size of 2000 bytes. + mode (str): + Optional. A column's mode indicates whether values in this + column are required, nullable, or repeated. + + Only ``NULLABLE``, ``REQUIRED``, and ``REPEATED`` values are + supported. Default mode is ``NULLABLE``. + default_value (str): + Optional. Default value for the column. + ordinal_position (int): + Optional. Ordinal position + highest_indexing_type (google.cloud.datacatalog_v1.types.ColumnSchema.IndexingType): + Optional. Most important inclusion of this + column. + subcolumns (MutableSequence[google.cloud.datacatalog_v1.types.ColumnSchema]): + Optional. Schema of sub-columns. A column can + have zero or more sub-columns. + looker_column_spec (google.cloud.datacatalog_v1.types.ColumnSchema.LookerColumnSpec): + Looker specific column info of this column. + + This field is a member of `oneof`_ ``system_spec``. + gc_rule (str): + Optional. Garbage collection policy for the + column or column family. Applies to systems like + Cloud Bigtable. + """ + class IndexingType(proto.Enum): + r"""Specifies inclusion of the column in an index + + Values: + INDEXING_TYPE_UNSPECIFIED (0): + Unspecified. + INDEXING_TYPE_NONE (1): + Column not a part of an index. + INDEXING_TYPE_NON_UNIQUE (2): + Column Part of non unique index. + INDEXING_TYPE_UNIQUE (3): + Column part of unique index. + INDEXING_TYPE_PRIMARY_KEY (4): + Column part of the primary key. + """ + INDEXING_TYPE_UNSPECIFIED = 0 + INDEXING_TYPE_NONE = 1 + INDEXING_TYPE_NON_UNIQUE = 2 + INDEXING_TYPE_UNIQUE = 3 + INDEXING_TYPE_PRIMARY_KEY = 4 + + class LookerColumnSpec(proto.Message): + r"""Column info specific to Looker System. + + Attributes: + type_ (google.cloud.datacatalog_v1.types.ColumnSchema.LookerColumnSpec.LookerColumnType): + Looker specific column type of this column. + """ + class LookerColumnType(proto.Enum): + r"""Column type in Looker. + + Values: + LOOKER_COLUMN_TYPE_UNSPECIFIED (0): + Unspecified. + DIMENSION (1): + Dimension. + DIMENSION_GROUP (2): + Dimension group - parent for Dimension. + FILTER (3): + Filter. + MEASURE (4): + Measure. + PARAMETER (5): + Parameter. 
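+
+            Example (a minimal sketch of declaring a Looker measure column;
+            assumes the generated top-level ``Schema``/``ColumnSchema`` types
+            and illustrative column names)::
+
+                from google.cloud import datacatalog_v1
+
+                LookerType = datacatalog_v1.ColumnSchema.LookerColumnSpec.LookerColumnType
+                column = datacatalog_v1.ColumnSchema(
+                    column="total_revenue",
+                    type_="number",  # free-form column type string
+                    mode="NULLABLE",
+                    looker_column_spec=datacatalog_v1.ColumnSchema.LookerColumnSpec(
+                        type_=LookerType.MEASURE,
+                    ),
+                )
+                schema = datacatalog_v1.Schema(columns=[column])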
+ """ + LOOKER_COLUMN_TYPE_UNSPECIFIED = 0 + DIMENSION = 1 + DIMENSION_GROUP = 2 + FILTER = 3 + MEASURE = 4 + PARAMETER = 5 + + type_: 'ColumnSchema.LookerColumnSpec.LookerColumnType' = proto.Field( + proto.ENUM, + number=1, + enum='ColumnSchema.LookerColumnSpec.LookerColumnType', + ) + + column: str = proto.Field( + proto.STRING, + number=6, + ) + type_: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + default_value: str = proto.Field( + proto.STRING, + number=8, + ) + ordinal_position: int = proto.Field( + proto.INT32, + number=9, + ) + highest_indexing_type: IndexingType = proto.Field( + proto.ENUM, + number=10, + enum=IndexingType, + ) + subcolumns: MutableSequence['ColumnSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ColumnSchema', + ) + looker_column_spec: LookerColumnSpec = proto.Field( + proto.MESSAGE, + number=18, + oneof='system_spec', + message=LookerColumnSpec, + ) + gc_rule: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py new file mode 100644 index 000000000000..56747a5be5a1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1.types import common +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'SearchResultType', + 'SearchCatalogResult', + }, +) + + +class SearchResultType(proto.Enum): + r"""The resource types that can be returned in search results. + + Values: + SEARCH_RESULT_TYPE_UNSPECIFIED (0): + Default unknown type. + ENTRY (1): + An [Entry][google.cloud.datacatalog.v1.Entry]. + TAG_TEMPLATE (2): + A [TagTemplate][google.cloud.datacatalog.v1.TagTemplate]. + ENTRY_GROUP (3): + An [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]. + """ + SEARCH_RESULT_TYPE_UNSPECIFIED = 0 + ENTRY = 1 + TAG_TEMPLATE = 2 + ENTRY_GROUP = 3 + + +class SearchCatalogResult(proto.Message): + r"""Result in the response to a search request. + + Each result captures details of one entry that matches the + search. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_result_type (google.cloud.datacatalog_v1.types.SearchResultType): + Type of the search result. + + You can use this field to determine which get + method to call to fetch the full resource. + search_result_subtype (str): + Sub-type of the search result. + + A dot-delimited full type of the resource. The same type you + specify in the ``type`` search predicate. + + Examples: ``entry.table``, ``entry.dataStream``, + ``tagTemplate``. + relative_resource_name (str): + The relative name of the resource in URL format. + + Examples: + + - ``projects/{PROJECT_ID}/locations/{LOCATION_ID}/entryGroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`` + - ``projects/{PROJECT_ID}/tagTemplates/{TAG_TEMPLATE_ID}`` + linked_resource (str): + The full name of the Google Cloud resource the entry belongs + to. + + For more information, see [Full Resource Name] + (/apis/design/resource_names#full_resource_name). + + Example: + + ``//bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID`` + modify_time (google.protobuf.timestamp_pb2.Timestamp): + The last modification timestamp of the entry + in the source system. + integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem): + Output only. The source system that Data + Catalog automatically integrates with, such as + BigQuery, Cloud Pub/Sub, or Dataproc Metastore. + + This field is a member of `oneof`_ ``system``. + user_specified_system (str): + Custom source system that you can manually + integrate Data Catalog with. + + This field is a member of `oneof`_ ``system``. + fully_qualified_name (str): + Fully qualified name (FQN) of the resource. + + FQNs take two forms: + + - For non-regionalized resources: + + ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + - For regionalized resources: + + ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` + + Example for a DPMS table: + + ``dataproc_metastore:PROJECT_ID.LOCATION_ID.INSTANCE_ID.DATABASE_ID.TABLE_ID`` + display_name (str): + The display name of the result. + description (str): + Entry description that can consist of several + sentences or paragraphs that describe entry + contents. 
+ """ + + search_result_type: 'SearchResultType' = proto.Field( + proto.ENUM, + number=1, + enum='SearchResultType', + ) + search_result_subtype: str = proto.Field( + proto.STRING, + number=2, + ) + relative_resource_name: str = proto.Field( + proto.STRING, + number=3, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=4, + ) + modify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + integrated_system: common.IntegratedSystem = proto.Field( + proto.ENUM, + number=8, + oneof='system', + enum=common.IntegratedSystem, + ) + user_specified_system: str = proto.Field( + proto.STRING, + number=9, + oneof='system', + ) + fully_qualified_name: str = proto.Field( + proto.STRING, + number=10, + ) + display_name: str = proto.Field( + proto.STRING, + number=12, + ) + description: str = proto.Field( + proto.STRING, + number=13, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py new file mode 100644 index 000000000000..0441bde6862a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'TableSourceType', + 'BigQueryTableSpec', + 'ViewSpec', + 'TableSpec', + 'BigQueryDateShardedSpec', + }, +) + + +class TableSourceType(proto.Enum): + r"""Table source type. + + Values: + TABLE_SOURCE_TYPE_UNSPECIFIED (0): + Default unknown type. + BIGQUERY_VIEW (2): + Table view. + BIGQUERY_TABLE (5): + BigQuery native table. + BIGQUERY_MATERIALIZED_VIEW (7): + BigQuery materialized view. + """ + TABLE_SOURCE_TYPE_UNSPECIFIED = 0 + BIGQUERY_VIEW = 2 + BIGQUERY_TABLE = 5 + BIGQUERY_MATERIALIZED_VIEW = 7 + + +class BigQueryTableSpec(proto.Message): + r"""Describes a BigQuery table. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table_source_type (google.cloud.datacatalog_v1.types.TableSourceType): + Output only. The table source type. + view_spec (google.cloud.datacatalog_v1.types.ViewSpec): + Table view specification. Populated only if the + ``table_source_type`` is ``BIGQUERY_VIEW``. + + This field is a member of `oneof`_ ``type_spec``. + table_spec (google.cloud.datacatalog_v1.types.TableSpec): + Specification of a BigQuery table. 
Populated only if the + ``table_source_type`` is ``BIGQUERY_TABLE``. + + This field is a member of `oneof`_ ``type_spec``. + """ + + table_source_type: 'TableSourceType' = proto.Field( + proto.ENUM, + number=1, + enum='TableSourceType', + ) + view_spec: 'ViewSpec' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type_spec', + message='ViewSpec', + ) + table_spec: 'TableSpec' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type_spec', + message='TableSpec', + ) + + +class ViewSpec(proto.Message): + r"""Table view specification. + + Attributes: + view_query (str): + Output only. The query that defines the table + view. + """ + + view_query: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TableSpec(proto.Message): + r"""Normal BigQuery table specification. + + Attributes: + grouped_entry (str): + Output only. If the table is date-sharded, that is, it + matches the ``[prefix]YYYYMMDD`` name pattern, this field is + the Data Catalog resource name of the date-sharded grouped + entry. For example: + + ``projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}``. + + Otherwise, ``grouped_entry`` is empty. + """ + + grouped_entry: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryDateShardedSpec(proto.Message): + r"""Specification for a group of BigQuery tables with the + ``[prefix]YYYYMMDD`` name pattern. + + For more information, see [Introduction to partitioned tables] + (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). + + Attributes: + dataset (str): + Output only. The Data Catalog resource name of the dataset + entry the current table belongs to. For example: + + ``projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}``. + table_prefix (str): + Output only. The table name prefix of the shards. + + The name of any given shard is ``[table_prefix]YYYYMMDD``. + For example, for the ``MyTable20180101`` shard, the + ``table_prefix`` is ``MyTable``. + shard_count (int): + Output only. Total number of shards. + latest_shard_resource (str): + Output only. BigQuery resource name of the + latest shard. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + table_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + shard_count: int = proto.Field( + proto.INT64, + number=3, + ) + latest_shard_resource: str = proto.Field( + proto.STRING, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py new file mode 100644 index 000000000000..b50679727e69 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
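+#
+# Illustrative sketch only (placeholder values, not part of the generated
+# code): one way the ``Tag`` and ``TagField`` messages defined in this module
+# can be constructed. The template path and the ``owner`` field ID below are
+# hypothetical examples.
+#
+#     from google.cloud import datacatalog_v1
+#
+#     tag = datacatalog_v1.Tag(
+#         template="projects/PROJECT_ID/locations/LOCATION/tagTemplates/TAG_TEMPLATE_ID",
+#         fields={
+#             "owner": datacatalog_v1.TagField(string_value="data-platform-team"),
+#         },
+#     )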
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'FieldType', + }, +) + + +class Tag(proto.Message): + r"""Tags contain custom metadata and are attached to Data Catalog + resources. Tags conform with the specification of their tag + template. + + See `Data Catalog + IAM `__ for + information on the permissions needed to create or view tags. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the tag in URL format + where tag ID is a system-generated identifier. + + Note: The tag itself might not be stored in the + location specified in its name. + template (str): + Required. The resource name of the tag template this tag + uses. Example: + + ``projects/{PROJECT_ID}/locations/{LOCATION}/tagTemplates/{TAG_TEMPLATE_ID}`` + + This field cannot be modified after creation. + template_display_name (str): + Output only. The display name of the tag + template. + column (str): + Resources like entry can have schemas associated with them. + This scope allows you to attach tags to an individual column + based on that schema. + + To attach a tag to a nested column, separate column names + with a dot (``.``). Example: ``column.nested_column``. + + This field is a member of `oneof`_ ``scope``. + fields (MutableMapping[str, google.cloud.datacatalog_v1.types.TagField]): + Required. Maps the ID of a tag field to its + value and additional information about that + field. + + Tag template defines valid field IDs. A tag + must have at least 1 field and at most 500 + fields. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + template: str = proto.Field( + proto.STRING, + number=2, + ) + template_display_name: str = proto.Field( + proto.STRING, + number=5, + ) + column: str = proto.Field( + proto.STRING, + number=4, + oneof='scope', + ) + fields: MutableMapping[str, 'TagField'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message='TagField', + ) + + +class TagField(proto.Message): + r"""Contains the value and additional information on a field within a + [Tag][google.cloud.datacatalog.v1.Tag]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Output only. The display name of this field. + double_value (float): + The value of a tag field with a double type. + + This field is a member of `oneof`_ ``kind``. + string_value (str): + The value of a tag field with a string type. + + The maximum length is 2000 UTF-8 characters. + + This field is a member of `oneof`_ ``kind``. + bool_value (bool): + The value of a tag field with a boolean type. + + This field is a member of `oneof`_ ``kind``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + The value of a tag field with a timestamp + type. + + This field is a member of `oneof`_ ``kind``. + enum_value (google.cloud.datacatalog_v1.types.TagField.EnumValue): + The value of a tag field with an enum type. 
+ + This value must be one of the allowed values + listed in this enum. + + This field is a member of `oneof`_ ``kind``. + richtext_value (str): + The value of a tag field with a rich text + type. + The maximum length is 10 MiB as this value holds + HTML descriptions including encoded images. The + maximum length of the text without images is 100 + KiB. + + This field is a member of `oneof`_ ``kind``. + order (int): + Output only. The order of this field with respect to other + fields in this tag. Can be set by + [Tag][google.cloud.datacatalog.v1.TagTemplateField.order]. + + For example, a higher value can indicate a more important + field. The value can be negative. Multiple fields can have + the same order, and field orders within a tag don't have to + be sequential. + """ + + class EnumValue(proto.Message): + r"""An enum value. + + Attributes: + display_name (str): + The display name of the enum value. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='kind', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='kind', + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='kind', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='kind', + message=timestamp_pb2.Timestamp, + ) + enum_value: EnumValue = proto.Field( + proto.MESSAGE, + number=6, + oneof='kind', + message=EnumValue, + ) + richtext_value: str = proto.Field( + proto.STRING, + number=8, + oneof='kind', + ) + order: int = proto.Field( + proto.INT32, + number=7, + ) + + +class TagTemplate(proto.Message): + r"""A tag template defines a tag that can have one or more typed fields. + + The template is used to create tags that are attached to Google + Cloud resources. [Tag template roles] + (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the template. For + example, see the [TagTemplate User] + (https://cloud.google.com/data-catalog/docs/how-to/template-user) + role that includes a permission to use the tag template to tag + resources. + + Attributes: + name (str): + The resource name of the tag template in URL + format. + Note: The tag template itself and its child + resources might not be stored in the location + specified in its name. + display_name (str): + Display name for this template. Defaults to an empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. + is_publicly_readable (bool): + Indicates whether tags created with this template are + public. Public tags do not require tag template access to + appear in [ListTags][google.cloud.datacatalog.v1.ListTags] + API response. + + Additionally, you can search for a public tag by value with + a simple search query in addition to using a ``tag:`` + predicate. + fields (MutableMapping[str, google.cloud.datacatalog_v1.types.TagTemplateField]): + Required. Map of tag template field IDs to the settings for + the field. This map is an exhaustive list of the allowed + fields. The map must contain at least one field and at most + 500 fields. + + The keys to this map are tag template field IDs. The IDs + have the following limitations: + + - Can contain uppercase and lowercase letters, numbers + (0-9) and underscores (_). 
+ - Must be at least 1 character and at most 64 characters + long. + - Must start with a letter or underscore. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + is_publicly_readable: bool = proto.Field( + proto.BOOL, + number=5, + ) + fields: MutableMapping[str, 'TagTemplateField'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message='TagTemplateField', + ) + + +class TagTemplateField(proto.Message): + r"""The template for an individual field within a tag template. + + Attributes: + name (str): + Output only. The resource name of the tag template field in + URL format. Example: + + ``projects/{PROJECT_ID}/locations/{LOCATION}/tagTemplates/{TAG_TEMPLATE}/fields/{FIELD}`` + + Note: The tag template field itself might not be stored in + the location specified in its name. + + The name must contain only letters (a-z, A-Z), numbers + (0-9), or underscores (_), and must start with a letter or + underscore. The maximum length is 64 characters. + display_name (str): + The display name for this field. Defaults to an empty + string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. + type_ (google.cloud.datacatalog_v1.types.FieldType): + Required. The type of value this tag field + can contain. + is_required (bool): + If true, this field is required. Defaults to + false. + description (str): + The description for this field. Defaults to + an empty string. + order (int): + The order of this field with respect to other + fields in this tag template. + + For example, a higher value can indicate a more + important field. The value can be negative. + Multiple fields can have the same order and + field orders within a tag don't have to be + sequential. + """ + + name: str = proto.Field( + proto.STRING, + number=6, + ) + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'FieldType' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldType', + ) + is_required: bool = proto.Field( + proto.BOOL, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + order: int = proto.Field( + proto.INT32, + number=5, + ) + + +class FieldType(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + primitive_type (google.cloud.datacatalog_v1.types.FieldType.PrimitiveType): + Primitive types, such as string, boolean, + etc. + + This field is a member of `oneof`_ ``type_decl``. + enum_type (google.cloud.datacatalog_v1.types.FieldType.EnumType): + An enum type. + + This field is a member of `oneof`_ ``type_decl``. + """ + class PrimitiveType(proto.Enum): + r""" + + Values: + PRIMITIVE_TYPE_UNSPECIFIED (0): + The default invalid value for a type. + DOUBLE (1): + A double precision number. + STRING (2): + An UTF-8 string. + BOOL (3): + A boolean value. + TIMESTAMP (4): + A timestamp. + RICHTEXT (5): + A Richtext description. 
+ """ + PRIMITIVE_TYPE_UNSPECIFIED = 0 + DOUBLE = 1 + STRING = 2 + BOOL = 3 + TIMESTAMP = 4 + RICHTEXT = 5 + + class EnumType(proto.Message): + r""" + + Attributes: + allowed_values (MutableSequence[google.cloud.datacatalog_v1.types.FieldType.EnumType.EnumValue]): + The set of allowed values for this enum. + + This set must not be empty and can include up to 100 allowed + values. The display names of the values in this set must not + be empty and must be case-insensitively unique within this + set. + + The order of items in this set is preserved. This field can + be used to create, remove, and reorder enum values. To + rename enum values, use the + ``RenameTagTemplateFieldEnumValue`` method. + """ + + class EnumValue(proto.Message): + r""" + + Attributes: + display_name (str): + Required. The display name of the enum value. Must not be an + empty string. + + The name must contain only Unicode letters, numbers (0-9), + underscores (_), dashes (-), spaces ( ), and can't start or + end with spaces. The maximum length is 200 characters. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + allowed_values: MutableSequence['FieldType.EnumType.EnumValue'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldType.EnumType.EnumValue', + ) + + primitive_type: PrimitiveType = proto.Field( + proto.ENUM, + number=1, + oneof='type_decl', + enum=PrimitiveType, + ) + enum_type: EnumType = proto.Field( + proto.MESSAGE, + number=2, + oneof='type_decl', + message=EnumType, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py new file mode 100644 index 000000000000..faa4bc694cb8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'SystemTimestamps', + }, +) + + +class SystemTimestamps(proto.Message): + r"""Timestamps associated with this resource in a particular + system. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Creation timestamp of the resource within the + given system. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp of the last modification of the + resource or its metadata within a given system. + + Note: Depending on the source system, not every + modification updates this timestamp. + For example, BigQuery timestamps every metadata + modification but not data or permission changes. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Expiration timestamp of the + resource within the given system. + Currently only applicable to BigQuery resources. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py new file mode 100644 index 000000000000..de40855f62db --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1', + manifest={ + 'UsageStats', + 'CommonUsageStats', + 'UsageSignal', + }, +) + + +class UsageStats(proto.Message): + r"""Detailed statistics on the entry's usage. + + Usage statistics have the following limitations: + + - Only BigQuery tables have them. + - They only include BigQuery query jobs. + - They might be underestimated because wildcard table references + are not yet counted. For more information, see [Querying multiple + tables using a wildcard table] + (https://cloud.google.com/bigquery/docs/querying-wildcard-tables) + + Attributes: + total_completions (float): + The number of successful uses of the + underlying entry. + total_failures (float): + The number of failed attempts to use the + underlying entry. + total_cancellations (float): + The number of cancelled attempts to use the + underlying entry. + total_execution_time_for_completions_millis (float): + Total time spent only on successful uses, in + milliseconds. + """ + + total_completions: float = proto.Field( + proto.FLOAT, + number=1, + ) + total_failures: float = proto.Field( + proto.FLOAT, + number=2, + ) + total_cancellations: float = proto.Field( + proto.FLOAT, + number=3, + ) + total_execution_time_for_completions_millis: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class CommonUsageStats(proto.Message): + r"""Common statistics on the entry's usage. + + They can be set on any system. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + view_count (int): + View count in source system. + + This field is a member of `oneof`_ ``_view_count``. 
+ """ + + view_count: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + + +class UsageSignal(proto.Message): + r"""The set of all usage signals that Data Catalog stores. + + Note: Usually, these signals are updated daily. In rare cases, + an update may fail but will be performed again on the next day. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + The end timestamp of the duration of usage + statistics. + usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1.types.UsageStats]): + Output only. BigQuery usage statistics over each of the + predefined time ranges. + + Supported time ranges are ``{"24H", "7D", "30D"}``. + common_usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1.types.CommonUsageStats]): + Common usage statistics over each of the predefined time + ranges. + + Supported time ranges are + ``{"24H", "7D", "30D", "Lifetime"}``. + favorite_count (int): + Favorite count in the source system. + + This field is a member of `oneof`_ ``_favorite_count``. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + usage_within_time_range: MutableMapping[str, 'UsageStats'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message='UsageStats', + ) + common_usage_within_time_range: MutableMapping[str, 'CommonUsageStats'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message='CommonUsageStats', + ) + favorite_count: int = proto.Field( + proto.INT64, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini b/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py b/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py new file mode 100644 index 000000000000..7c6730232962 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
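+#
+# For reference, the sessions configured below are typically run with the
+# ``nox`` CLI, for example (assuming ``nox`` is installed; ``nox -l`` lists
+# the available sessions):
+#
+#     nox -s unit-3.11   # run the unit tests on Python 3.11
+#     nox -s docs        # build the Sphinx documentation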
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/datacatalog_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py new file mode 100644 index 000000000000..54c6f72a5136 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py new file mode 100644 index 000000000000..d057ba61b2e9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = await client.create_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py new file mode 100644 index 000000000000..2ed262520b99 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = client.create_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py new file mode 100644 index 000000000000..16317907ef44 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py new file mode 100644 index 000000000000..9200460d1744 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = await client.create_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py new file mode 100644 index 000000000000..fca175bcfea8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_tag(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = client.create_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py new file mode 100644 index 000000000000..386f19d98a39 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = await client.create_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py new file mode 100644 index 000000000000..dbec9a803904 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.create_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py new file mode 100644 index 000000000000..f91d58e30ec2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.create_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py new file mode 100644 index 000000000000..78c8204e54f0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = client.create_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py new file mode 100644 index 000000000000..ee89eb7798c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py new file mode 100644 index 000000000000..df2cfd22436e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry_group(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py new file mode 100644 index 000000000000..9f11c663f32e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_entry_group(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py new file mode 100644 index 000000000000..5f7e0f63e554 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + client.delete_entry(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py new file mode 100644 index 000000000000..4cd043e360cd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_tag(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py new file mode 100644 index 000000000000..34192ac63908 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_tag(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + client.delete_tag(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py new file mode 100644 index 000000000000..19cb502cd241 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py new file mode 100644 index 000000000000..5c7c981e1594 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template_field(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py new file mode 100644 index 000000000000..163cc051fc6b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template_field(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py new file mode 100644 index 000000000000..adf699293348 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template(request=request) + + +# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py new file mode 100644 index 000000000000..302dd4280271 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_get_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py new file mode 100644 index 000000000000..b56aafa11e72 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_get_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py new file mode 100644 index 000000000000..50314eef3272 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_get_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py new file mode 100644 index 000000000000..8e48e4aff6a7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_get_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py new file mode 100644 index 000000000000..88a0c41f6a12 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py new file mode 100644 index 000000000000..4d74494edaae --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py new file mode 100644 index 000000000000..690f210fb2c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_get_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py new file mode 100644 index 000000000000..13727551bbaf --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_get_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py new file mode 100644 index 000000000000..2b6bf08da25f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ImportEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_import_entries():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.ImportEntriesRequest(
+        gcs_bucket_path="gcs_bucket_path_value",
+        parent="parent_value",
+    )
+
+    # Make the request
+    operation = await client.import_entries(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_ImportEntries_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py
new file mode 100644
index 000000000000..bc77b4ce6cff
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ImportEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_ImportEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_import_entries(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ImportEntriesRequest( + gcs_bucket_path="gcs_bucket_path_value", + parent="parent_value", + ) + + # Make the request + operation = client.import_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ImportEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py new file mode 100644 index 000000000000..c9b1a9b43b12 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_list_entries():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.ListEntriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_ListEntries_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py
new file mode 100644
index 000000000000..d035990417ec
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_ListEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_list_entries(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py new file mode 100644 index 000000000000..c6eb9896d1b2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntryGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ListEntryGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_list_entry_groups():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.ListEntryGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_entry_groups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_ListEntryGroups_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py
new file mode 100644
index 000000000000..9332af6b72f8
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py new file mode 100644 index 000000000000..ced100b01580 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ListTags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_list_tags():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.ListTagsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_tags(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_ListTags_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py
new file mode 100644
index 000000000000..972b8001db6a
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_ListTags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_list_tags(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ListTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py new file mode 100644 index 000000000000..7367122315bd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_LookupEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_lookup_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py new file mode 100644 index 000000000000..e63535f74323 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_LookupEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_lookup_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py new file mode 100644 index 000000000000..e90b18d8ec9a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyEntryContacts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_modify_entry_contacts(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryContactsRequest( + name="name_value", + ) + + # Make the request + response = await client.modify_entry_contacts(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py new file mode 100644 index 000000000000..3c62b038b46c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyEntryContacts +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_modify_entry_contacts(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryContactsRequest( + name="name_value", + ) + + # Make the request + response = client.modify_entry_contacts(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py new file mode 100644 index 000000000000..bddcd6e48ece --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyEntryOverview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_modify_entry_overview(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryOverviewRequest( + name="name_value", + ) + + # Make the request + response = await client.modify_entry_overview(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py new file mode 100644 index 000000000000..cc50d86d6d43 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyEntryOverview +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_modify_entry_overview(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ModifyEntryOverviewRequest( + name="name_value", + ) + + # Make the request + response = client.modify_entry_overview(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py new file mode 100644 index 000000000000..d03e830e5eeb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReconcileTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_ReconcileTags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_reconcile_tags():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.ReconcileTagsRequest(
+        parent="parent_value",
+        tag_template="tag_template_value",
+    )
+
+    # Make the request
+    operation = await client.reconcile_tags(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_ReconcileTags_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py
new file mode 100644
index 000000000000..db7d90ef4d74
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ReconcileTags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_ReconcileTags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_reconcile_tags(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.ReconcileTagsRequest( + parent="parent_value", + tag_template="tag_template_value", + ) + + # Make the request + operation = client.reconcile_tags(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_ReconcileTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py new file mode 100644 index 000000000000..becfd2d292b9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = await client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py new file mode 100644 index 000000000000..2f2bd73d10ce --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateFieldEnumValue +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = await client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py new file mode 100644 index 000000000000..719a36507137 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateFieldEnumValue +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py new file mode 100644 index 000000000000..722704f3f2b2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py new file mode 100644 index 000000000000..62bb1d15bb0c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_SearchCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1
+
+
+async def sample_search_catalog():
+    # Create a client
+    client = datacatalog_v1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1.SearchCatalogRequest(
+    )
+
+    # Make the request
+    page_result = await client.search_catalog(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1_generated_DataCatalog_SearchCatalog_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py
new file mode 100644
index 000000000000..9e2673c05d52
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchCatalog
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1_generated_DataCatalog_SearchCatalog_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_search_catalog(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_DataCatalog_SearchCatalog_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py new file mode 100644 index 000000000000..d9654f7affa6 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py new file mode 100644 index 000000000000..5dc66b715318 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py new file mode 100644 index 000000000000..2370cfc3e188 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StarEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_StarEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_star_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.StarEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.star_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_StarEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py new file mode 100644 index 000000000000..5bee56cb6e74 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StarEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_StarEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_star_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.StarEntryRequest( + name="name_value", + ) + + # Make the request + response = client.star_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_StarEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py new file mode 100644 index 000000000000..76be1c9b0ba7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py new file mode 100644 index 000000000000..4d2bb42106f3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py new file mode 100644 index 000000000000..6dd5a2120ba5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnstarEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UnstarEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_unstar_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UnstarEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.unstar_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UnstarEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py new file mode 100644 index 000000000000..d1712e6863db --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UnstarEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UnstarEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_unstar_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UnstarEntryRequest( + name="name_value", + ) + + # Make the request + response = client.unstar_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UnstarEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py new file mode 100644 index 000000000000..edfcd1577e86 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_entry(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py new file mode 100644 index 000000000000..5d3b8dd18792 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateEntryGroupRequest( + ) + + # Make the request + response = await client.update_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py new file mode 100644 index 000000000000..f6ea137ae8bc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_entry_group(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateEntryGroupRequest( + ) + + # Make the request + response = client.update_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py new file mode 100644 index 000000000000..ae8cedd82586 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_entry(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1.Entry() + entry.type_ = "LOOK" + entry.integrated_system = "VERTEX_AI" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py new file mode 100644 index 000000000000..7ea7f33ebeb5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_tag(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = await client.update_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py new file mode 100644 index 000000000000..71cca56b1f26 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_tag(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = client.update_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py new file mode 100644 index 000000000000..1ab7aaacdfc6 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTagTemplateRequest( + ) + + # Make the request + response = await client.update_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py new file mode 100644 index 000000000000..3da6486ccde2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.update_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py new file mode 100644 index 000000000000..fc4321e3f4ff --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1.TagTemplateField() + tag_template_field.type_.primitive_type = "RICHTEXT" + + request = datacatalog_v1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.update_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py new file mode 100644 index 000000000000..47b91a536181 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_tag_template(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTagTemplateRequest( + ) + + # Make the request + response = client.update_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py new file mode 100644 index 000000000000..c1b831646046 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py new file mode 100644 index 000000000000..6dba3cab9627 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py new file mode 100644 index 000000000000..5b4ea405a91a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py new file mode 100644 index 000000000000..8eeab4707753 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py new file mode 100644 index 000000000000..7da5d688f254 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_policy_tag(request=request) + + +# [END datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py new file mode 100644 index 000000000000..6b6ef2e28a3b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + client.delete_policy_tag(request=request) + + +# [END datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py new file mode 100644 index 000000000000..e98b96756c6d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + await client.delete_taxonomy(request=request) + + +# [END datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py new file mode 100644 index 000000000000..d96da5017b32 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + client.delete_taxonomy(request=request) + + +# [END datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py new file mode 100644 index 000000000000..1116ba587bae --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py new file mode 100644 index 000000000000..2c81ed5468ed --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py new file mode 100644 index 000000000000..55fbf69a7c1f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = await client.get_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py new file mode 100644 index 000000000000..c3c9fd284b3b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = client.get_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py new file mode 100644 index 000000000000..862f3c819340 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py new file mode 100644 index 000000000000..3861a49aab88 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py new file mode 100644 index 000000000000..c8237c15a626 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPolicyTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py new file mode 100644 index 000000000000..00cd03844f48 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPolicyTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py new file mode 100644 index 000000000000..f497579b1b1c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py new file mode 100644 index 000000000000..11263b6d90c8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py new file mode 100644 index 000000000000..8bbdbc179490 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = await client.export_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py new file mode 100644 index 000000000000..1f30c159f594 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + request = datacatalog_v1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = client.export_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py new file mode 100644 index 000000000000..2afe477870f2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py new file mode 100644 index 000000000000..0f6462dbca8a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = client.import_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py new file mode 100644 index 000000000000..c1a6c68403b8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReplaceTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_replace_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() + serialized_taxonomy.display_name = "display_name_value" + + request = datacatalog_v1.ReplaceTaxonomyRequest( + name="name_value", + serialized_taxonomy=serialized_taxonomy, + ) + + # Make the request + response = await client.replace_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py new file mode 100644 index 000000000000..947acd23c470 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReplaceTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_replace_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() + serialized_taxonomy.display_name = "display_name_value" + + request = datacatalog_v1.ReplaceTaxonomyRequest( + name="name_value", + serialized_taxonomy=serialized_taxonomy, + ) + + # Make the request + response = client.replace_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py new file mode 100644 index 000000000000..2bf7b337ab49 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py new file mode 100644 index 000000000000..6703fdd8ef3a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py new file mode 100644 index 000000000000..7b1962ac8ca7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py new file mode 100644 index 000000000000..b0ed75bd1818 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py new file mode 100644 index 000000000000..8c6afcd5a236 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdatePolicyTagRequest( + ) + + # Make the request + response = await client.update_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py new file mode 100644 index 000000000000..e7ccc2ce0409 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdatePolicyTagRequest( + ) + + # Make the request + response = client.update_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py new file mode 100644 index 000000000000..8f2e122eaaaa --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTaxonomyRequest( + ) + + # Make the request + response = await client.update_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py new file mode 100644 index 000000000000..0d88c2b173d5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1.UpdateTaxonomyRequest( + ) + + # Make the request + response = client.update_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json new file mode 100644 index 000000000000..1659a652e62c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -0,0 +1,8111 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.datacatalog.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-datacatalog", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1.types.EntryGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + 
"shortName": "DataCatalog" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1.types.EntryGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_create_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1.types.Entry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "datacatalog_v1_generated_data_catalog_create_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntry_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntry", + "service": { + "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1.types.Entry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "datacatalog_v1_generated_data_catalog_create_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntry_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_field_id", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1.types.TagTemplateField" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "create_tag_template_field" + }, + "description": "Sample for CreateTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": 
"google.cloud.datacatalog_v1.DataCatalogClient.create_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_field_id", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1.types.TagTemplateField" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "create_tag_template_field" + }, + "description": "Sample for CreateTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_id", + "type": "str" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1.types.TagTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "create_tag_template" + }, + "description": "Sample for CreateTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_create_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datacatalog_v1_generated_data_catalog_create_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_id", + "type": "str" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1.types.TagTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "create_tag_template" + }, + "description": "Sample for CreateTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_create_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1.types.Tag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Tag", + "shortName": "create_tag" + }, + "description": "Sample for CreateTag", + "file": "datacatalog_v1_generated_data_catalog_create_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTag_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "datacatalog_v1_generated_data_catalog_create_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1.types.Tag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Tag", + "shortName": "create_tag" + }, + "description": "Sample for CreateTag", + "file": "datacatalog_v1_generated_data_catalog_create_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTag_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_create_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_delete_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": 
"google.cloud.datacatalog_v1.DataCatalogClient.delete_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "datacatalog_v1_generated_data_catalog_delete_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntry_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteEntryRequest" + }, 
+ { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "datacatalog_v1_generated_data_catalog_delete_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntry_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template_field" + }, + "description": "Sample for DeleteTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template_field" + }, + "description": "Sample for DeleteTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template" + }, + "description": "Sample for DeleteTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template" + }, + "description": "Sample for DeleteTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag" + }, + "description": "Sample for DeleteTag", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTag_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag" + }, + "description": "Sample for DeleteTag", + "file": "datacatalog_v1_generated_data_catalog_delete_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTag_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_delete_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "read_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "read_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_get_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datacatalog_v1_generated_data_catalog_get_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "datacatalog_v1_generated_data_catalog_get_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "datacatalog_v1_generated_data_catalog_get_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": 
"google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1_generated_data_catalog_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + 
"shortName": "GetTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "get_tag_template" + }, + "description": "Sample for GetTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_get_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetTagTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "get_tag_template" + }, + "description": "Sample for GetTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_get_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_get_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.import_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ImportEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ImportEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ImportEntriesRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_entries" + }, + "description": "Sample for ImportEntries", + "file": "datacatalog_v1_generated_data_catalog_import_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ImportEntries_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_import_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.import_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ImportEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ImportEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ImportEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_entries" + }, + "description": "Sample for ImportEntries", + "file": "datacatalog_v1_generated_data_catalog_import_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ImportEntries_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_import_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesAsyncPager", + "shortName": "list_entries" 
+ }, + "description": "Sample for ListEntries", + "file": "datacatalog_v1_generated_data_catalog_list_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "datacatalog_v1_generated_data_catalog_list_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_entry_groups", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsAsyncPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "datacatalog_v1_generated_data_catalog_list_entry_groups_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntryGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_entry_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_entry_groups", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsAsyncPager", + "shortName": "list_tags" + }, + "description": "Sample for ListTags", + "file": "datacatalog_v1_generated_data_catalog_list_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListTags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + 
"end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsPager", + "shortName": "list_tags" + }, + "description": "Sample for ListTags", + "file": "datacatalog_v1_generated_data_catalog_list_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ListTags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_list_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.lookup_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.LookupEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "datacatalog_v1_generated_data_catalog_lookup_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_LookupEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_lookup_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.lookup_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.LookupEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "datacatalog_v1_generated_data_catalog_lookup_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_LookupEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_lookup_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.modify_entry_contacts", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ModifyEntryContacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Contacts", + "shortName": "modify_entry_contacts" + }, + "description": "Sample for ModifyEntryContacts", + "file": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + 
"fullName": "google.cloud.datacatalog_v1.DataCatalogClient.modify_entry_contacts", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ModifyEntryContacts" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Contacts", + "shortName": "modify_entry_contacts" + }, + "description": "Sample for ModifyEntryContacts", + "file": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.modify_entry_overview", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ModifyEntryOverview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryOverview", + "shortName": "modify_entry_overview" + }, + "description": "Sample for ModifyEntryOverview", + "file": "datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.modify_entry_overview", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview", + "service": { + 
"fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ModifyEntryOverview" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryOverview", + "shortName": "modify_entry_overview" + }, + "description": "Sample for ModifyEntryOverview", + "file": "datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.reconcile_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ReconcileTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ReconcileTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ReconcileTagsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reconcile_tags" + }, + "description": "Sample for ReconcileTags", + "file": "datacatalog_v1_generated_data_catalog_reconcile_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ReconcileTags_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_reconcile_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.reconcile_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.ReconcileTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ReconcileTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ReconcileTagsRequest" + }, + { + "name": "retry", 
+ "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "reconcile_tags" + }, + "description": "Sample for ReconcileTags", + "file": "datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_ReconcileTags_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.rename_tag_template_field_enum_value", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateFieldEnumValue" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_enum_value_display_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "rename_tag_template_field_enum_value" + }, + "description": "Sample for RenameTagTemplateFieldEnumValue", + "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.rename_tag_template_field_enum_value", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateFieldEnumValue" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_enum_value_display_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "rename_tag_template_field_enum_value" + }, + "description": "Sample for RenameTagTemplateFieldEnumValue", + "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.rename_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_tag_template_field_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "rename_tag_template_field" + }, + "description": "Sample for RenameTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.rename_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField", + "service": { + "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_tag_template_field_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "rename_tag_template_field" + }, + "description": "Sample for RenameTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.search_catalog", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.SearchCatalog", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SearchCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest" + }, + { + "name": "scope", + "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogAsyncPager", + "shortName": "search_catalog" + }, + "description": "Sample for SearchCatalog", + "file": "datacatalog_v1_generated_data_catalog_search_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SearchCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_search_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.search_catalog", + "method": { + "fullName": 
"google.cloud.datacatalog.v1.DataCatalog.SearchCatalog", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SearchCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest" + }, + { + "name": "scope", + "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogPager", + "shortName": "search_catalog" + }, + "description": "Sample for SearchCatalog", + "file": "datacatalog_v1_generated_data_catalog_search_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SearchCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_search_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1_generated_data_catalog_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetIamPolicy", + "service": { + "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.star_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.StarEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "StarEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.StarEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.StarEntryResponse", + "shortName": "star_entry" + }, + "description": "Sample for StarEntry", + "file": "datacatalog_v1_generated_data_catalog_star_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_StarEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_star_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.star_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.StarEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "StarEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.StarEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.StarEntryResponse", + "shortName": "star_entry" + }, + "description": "Sample for StarEntry", + "file": "datacatalog_v1_generated_data_catalog_star_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_StarEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_star_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + 
"shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.unstar_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UnstarEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UnstarEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UnstarEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.UnstarEntryResponse", + "shortName": "unstar_entry" + }, + "description": "Sample for UnstarEntry", + "file": "datacatalog_v1_generated_data_catalog_unstar_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UnstarEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_unstar_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.unstar_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UnstarEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UnstarEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UnstarEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.UnstarEntryResponse", + "shortName": "unstar_entry" + }, + "description": "Sample for UnstarEntry", + "file": "datacatalog_v1_generated_data_catalog_unstar_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datacatalog_v1_generated_DataCatalog_UnstarEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_unstar_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "datacatalog_v1_generated_data_catalog_update_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": 
"datacatalog_v1_generated_data_catalog_update_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "datacatalog_v1_generated_data_catalog_update_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntry_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": 
"datacatalog_v1_generated_data_catalog_update_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntry_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1.types.TagTemplateField" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "update_tag_template_field" + }, + "description": "Sample for UpdateTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1.types.TagTemplateField" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" 
+ }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", + "shortName": "update_tag_template_field" + }, + "description": "Sample for UpdateTagTemplateField", + "file": "datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1.types.TagTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "update_tag_template" + }, + "description": "Sample for UpdateTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_update_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest" + }, + { + "name": "tag_template", + "type": 
"google.cloud.datacatalog_v1.types.TagTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", + "shortName": "update_tag_template" + }, + "description": "Sample for UpdateTagTemplate", + "file": "datacatalog_v1_generated_data_catalog_update_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagRequest" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1.types.Tag" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Tag", + "shortName": "update_tag" + }, + "description": "Sample for UpdateTag", + "file": "datacatalog_v1_generated_data_catalog_update_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTag_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTagRequest" + }, + { + "name": "tag", + "type": 
"google.cloud.datacatalog_v1.types.Tag" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Tag", + "shortName": "update_tag" + }, + "description": "Sample for UpdateTag", + "file": "datacatalog_v1_generated_data_catalog_update_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTag_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_update_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", + "shortName": "PolicyTagManagerSerializationAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.export_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ExportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse", + "shortName": "export_taxonomies" + }, + "description": "Sample for ExportTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", + "shortName": "PolicyTagManagerSerializationClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.export_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ExportTaxonomies" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse", + "shortName": "export_taxonomies" + }, + "description": "Sample for ExportTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", + "shortName": "PolicyTagManagerSerializationAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.import_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ImportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse", + "shortName": "import_taxonomies" + }, + "description": "Sample for ImportTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", + "shortName": "PolicyTagManagerSerializationClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.import_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies", + "service": { + "fullName": 
"google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ImportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse", + "shortName": "import_taxonomies" + }, + "description": "Sample for ImportTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", + "shortName": "PolicyTagManagerSerializationAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.replace_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ReplaceTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "replace_taxonomy" + }, + "description": "Sample for ReplaceTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", + "shortName": "PolicyTagManagerSerializationClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.replace_taxonomy", + "method": { + "fullName": 
"google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ReplaceTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "replace_taxonomy" + }, + "description": "Sample for ReplaceTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.create_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreatePolicyTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "create_policy_tag" + }, + "description": "Sample for CreatePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": 
"google.cloud.datacatalog_v1.PolicyTagManagerClient.create_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreatePolicyTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "create_policy_tag" + }, + "description": "Sample for CreatePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.create_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "create_taxonomy" + }, + "description": "Sample for CreateTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.create_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.CreateTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "create_taxonomy" + }, + "description": "Sample for CreateTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.delete_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeletePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeletePolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_policy_tag" + }, + "description": "Sample for DeletePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + 
"fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.delete_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeletePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeletePolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_policy_tag" + }, + "description": "Sample for DeletePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.delete_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeleteTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_taxonomy" + }, + "description": "Sample for DeleteTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.delete_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + 
}, + "shortName": "DeleteTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_taxonomy" + }, + "description": "Sample for DeleteTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetPolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetPolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "get_policy_tag" + }, + "description": "Sample for GetPolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetPolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetPolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "get_policy_tag" + }, 
+ "description": "Sample for GetPolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "get_taxonomy" + }, + "description": "Sample for GetTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.GetTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "get_taxonomy" + }, + "description": "Sample for GetTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.list_policy_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListPolicyTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListPolicyTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager", + "shortName": "list_policy_tags" + }, + "description": "Sample for ListPolicyTags", + "file": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.list_policy_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListPolicyTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListPolicyTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager", + "shortName": "list_policy_tags" + }, + "description": "Sample for ListPolicyTags", + "file": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.list_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager", + "shortName": "list_taxonomies" + }, + "description": "Sample for ListTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.list_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.ListTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager", + "shortName": "list_taxonomies" + }, + "description": "Sample for ListTaxonomies", + "file": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.update_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "update_policy_tag" + }, + "description": "Sample for UpdatePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.update_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", + "shortName": "update_policy_tag" + }, + "description": "Sample for UpdatePolicyTag", + "file": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.update_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "update_taxonomy" + }, + "description": "Sample for UpdateTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.update_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", + "shortName": "update_taxonomy" + }, + "description": "Sample for UpdateTaxonomy", + "file": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py b/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py new file mode 100644 index 000000000000..7fe4359520fe --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py @@ -0,0 +1,222 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datacatalogCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), + 'create_policy_tag': ('parent', 'policy_tag', ), + 'create_tag': ('parent', 'tag', ), + 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), + 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), + 'create_taxonomy': ('parent', 'taxonomy', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'force', ), + 'delete_policy_tag': ('name', ), + 'delete_tag': ('name', ), + 'delete_tag_template': ('name', 'force', ), + 'delete_tag_template_field': ('name', 'force', ), + 'delete_taxonomy': ('name', ), + 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), + 'get_entry': ('name', ), + 'get_entry_group': ('name', 'read_mask', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_policy_tag': ('name', ), + 'get_tag_template': ('name', ), + 'get_taxonomy': ('name', ), + 'import_entries': ('parent', 'gcs_bucket_path', 'job_id', ), + 'import_taxonomies': ('parent', 'inline_source', 'cross_regional_source', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', ), + 'list_policy_tags': ('parent', 'page_size', 'page_token', ), + 'list_tags': ('parent', 'page_size', 'page_token', ), + 'list_taxonomies': ('parent', 'page_size', 'page_token', 'filter', ), + 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', 'project', 'location', ), + 'modify_entry_contacts': ('name', 'contacts', ), + 'modify_entry_overview': ('name', 'entry_overview', ), + 'reconcile_tags': ('parent', 'tag_template', 'force_delete_missing', 'tags', ), + 
'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), + 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), + 'replace_taxonomy': ('name', 'serialized_taxonomy', ), + 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', 'admin_search', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'star_entry': ('name', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'unstar_entry': ('name', ), + 'update_entry': ('entry', 'update_mask', ), + 'update_entry_group': ('entry_group', 'update_mask', ), + 'update_policy_tag': ('policy_tag', 'update_mask', ), + 'update_tag': ('tag', 'update_mask', ), + 'update_tag_template': ('tag_template', 'update_mask', ), + 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), + 'update_taxonomy': ('taxonomy', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datacatalogCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datacatalog client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/setup.py b/owl-bot-staging/google-cloud-datacatalog/v1/setup.py new file mode 100644 index 000000000000..1d59854d3bcb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
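For orientation, a minimal sketch (not a definitive usage) of the rewrite the fixup script above performs. It assumes the datacatalogCallTransformer class can be imported from scripts/fixup_datacatalog_v1_keywords.py, and the names client, parent, taxonomy, and retry are hypothetical names in the user code being fixed:

    # Sketch only: apply the transformer to a single hypothetical call site.
    import libcst as cst
    from fixup_datacatalog_v1_keywords import datacatalogCallTransformer

    SRC = "client.create_taxonomy(parent, taxonomy, retry=retry)\n"

    tree = cst.parse_module(SRC)          # parse the original source
    fixed = tree.visit(datacatalogCallTransformer())  # fold positional args into a request dict
    print(fixed.code)
    # Expected output (modulo whitespace): the flattened positional arguments are
    # folded into a single request dict, while the control parameters
    # (retry/timeout/metadata) remain keyword arguments:
    #   client.create_taxonomy(request={'parent': parent, 'taxonomy': taxonomy}, retry=retry)

In practice the script is run over a whole directory tree, e.g. python fixup_datacatalog_v1_keywords.py -d IN_DIR -o OUT_DIR (directory names are placeholders); per the checks in the __main__ block, the output directory must already exist and be empty.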
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-datacatalog' + + +description = "Google Cloud Datacatalog API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/datacatalog/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/python-datacatalog" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py new file mode 100644 index 000000000000..011518302885 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -0,0 +1,10354 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogAsyncClient +from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient +from google.cloud.datacatalog_v1.services.data_catalog import pagers +from google.cloud.datacatalog_v1.services.data_catalog import transports +from google.cloud.datacatalog_v1.types import bigquery +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import data_source +from google.cloud.datacatalog_v1.types import datacatalog +from google.cloud.datacatalog_v1.types import dataplex_spec +from google.cloud.datacatalog_v1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1.types import physical_schema +from google.cloud.datacatalog_v1.types import schema +from google.cloud.datacatalog_v1.types import search +from google.cloud.datacatalog_v1.types import table_spec +from google.cloud.datacatalog_v1.types import tags +from google.cloud.datacatalog_v1.types import timestamps +from google.cloud.datacatalog_v1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataCatalogClient._get_default_mtls_endpoint(None) is None + assert DataCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataCatalogClient, "grpc"), + (DataCatalogAsyncClient, "grpc_asyncio"), +]) +def test_data_catalog_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataCatalogGrpcTransport, "grpc"), + (transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_catalog_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataCatalogClient, "grpc"), + (DataCatalogAsyncClient, "grpc_asyncio"), +]) +def test_data_catalog_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +def test_data_catalog_client_get_transport_class(): + transport = DataCatalogClient.get_transport_class() + available_transports = [ + transports.DataCatalogGrpcTransport, + ] + assert transport in 
available_transports + + transport = DataCatalogClient.get_transport_class("grpc") + assert transport == transports.DataCatalogGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +def test_data_catalog_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_catalog_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataCatalogClient, DataCatalogAsyncClient +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +def test_data_catalog_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_catalog_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_catalog_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_catalog_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataCatalogClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_catalog_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.SearchCatalogRequest, + dict, +]) +def test_search_catalog(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.search_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_search_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + client.search_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + +@pytest.mark.asyncio +async def test_search_catalog_async(transport: str = 'grpc_asyncio', request_type=datacatalog.SearchCatalogRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.search_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogAsyncPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_search_catalog_async_from_dict(): + await test_search_catalog_async(request_type=dict) + + +def test_search_catalog_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_catalog( + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + + +def test_search_catalog_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_catalog( + datacatalog.SearchCatalogRequest(), + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + +@pytest.mark.asyncio +async def test_search_catalog_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_catalog( + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_search_catalog_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_catalog( + datacatalog.SearchCatalogRequest(), + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + +def test_search_catalog_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.search_catalog(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, search.SearchCatalogResult) + for i in results) +def test_search_catalog_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_catalog(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_catalog_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_catalog(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, search.SearchCatalogResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_search_catalog_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_catalog(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateEntryGroupRequest, + dict, +]) +def test_create_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + client.create_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + +@pytest.mark.asyncio +async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + + +def test_create_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry_group( + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_group_id + mock_val = 'entry_group_id_value' + assert arg == mock_val + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + + +def test_create_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry_group( + datacatalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry_group( + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_group_id + mock_val = 'entry_group_id_value' + assert arg == mock_val + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_entry_group( + datacatalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetEntryGroupRequest, + dict, +]) +def test_get_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + client.get_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + +@pytest.mark.asyncio +async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_entry_group_async_from_dict(): + await test_get_entry_group_async(request_type=dict) + + +def test_get_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entry_group( + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].read_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_get_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_group( + datacatalog.GetEntryGroupRequest(), + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_get_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry_group( + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].read_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry_group( + datacatalog.GetEntryGroupRequest(), + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateEntryGroupRequest, + dict, +]) +def test_update_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + client.update_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + +@pytest.mark.asyncio +async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + + +def test_update_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_group( + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_group( + datacatalog.UpdateEntryGroupRequest(), + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_entry_group( + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry_group( + datacatalog.UpdateEntryGroupRequest(), + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteEntryGroupRequest, + dict, +]) +def test_delete_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + client.delete_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + +@pytest.mark.asyncio +async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + + # Establish that the response is the type that we expect. 
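+    # DeleteEntryGroup has an Empty response, which the client surfaces to
+    # the caller as None.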
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + + +def test_delete_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = None + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_group( + datacatalog.DeleteEntryGroupRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
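+    # Patching `__call__` on the transport method's type intercepts the RPC
+    # at the stub level, so the call never reaches the wire.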
+ with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry_group( + datacatalog.DeleteEntryGroupRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListEntryGroupsRequest, + dict, +]) +def test_list_entry_groups(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_entry_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + client.list_entry_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + +@pytest.mark.asyncio +async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntryGroupsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. 
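+        # For the async client the mocked value is wrapped in
+        # grpc_helpers_async.FakeUnaryUnaryCall so that awaiting the stubbed
+        # RPC yields the response message.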
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entry_groups_async_from_dict(): + await test_list_entry_groups_async(request_type=dict) + + +def test_list_entry_groups_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntryGroupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = datacatalog.ListEntryGroupsResponse() + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entry_groups_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntryGroupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) + await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entry_groups_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entry_groups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entry_groups_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entry_groups( + datacatalog.ListEntryGroupsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_entry_groups_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_entry_groups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_entry_groups_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_entry_groups( + datacatalog.ListEntryGroupsRequest(), + parent='parent_value', + ) + + +def test_list_entry_groups_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entry_groups(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datacatalog.EntryGroup) + for i in results) +def test_list_entry_groups_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Set the response to a series of pages. 
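+        # Each element of `side_effect` is consumed by one page fetch; the
+        # trailing RuntimeError fires if the pager ever requests a page
+        # beyond those provided here.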
+ call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entry_groups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entry_groups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datacatalog.EntryGroup) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
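+        # The async variant drives the same page fixtures through the
+        # `.pages` asynchronous iterator below.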
+ call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateEntryRequest, + dict, +]) +def test_create_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + client.create_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + +@pytest.mark.asyncio +async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_entry_async_from_dict(): + await test_create_entry_async(request_type=dict) + + +def test_create_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. 
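+        # The async tests only check that the stub was invoked at least once;
+        # the sync variants assert an exact call count of 1.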
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry( + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_id + mock_val = 'entry_id_value' + assert arg == mock_val + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + + +def test_create_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry( + datacatalog.CreateEntryRequest(), + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry( + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_id + mock_val = 'entry_id_value' + assert arg == mock_val + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
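+    # The ValueError is raised client-side before any RPC is attempted, so
+    # no transport mock is needed here.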
+ with pytest.raises(ValueError): + await client.create_entry( + datacatalog.CreateEntryRequest(), + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateEntryRequest, + dict, +]) +def test_update_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + client.update_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + +@pytest.mark.asyncio +async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_entry_async_from_dict(): + await test_update_entry_async(request_type=dict) + + +def test_update_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryRequest() + + request.entry.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryRequest() + + request.entry.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry( + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry( + datacatalog.UpdateEntryRequest(), + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_entry( + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry( + datacatalog.UpdateEntryRequest(), + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteEntryRequest, + dict, +]) +def test_delete_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
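+    # With no request object and no flattened fields, the client should
+    # still send a default DeleteEntryRequest(), as asserted below.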
+ client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + client.delete_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + +@pytest.mark.asyncio +async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + + +def test_delete_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = None + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
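+    # `kw` holds the keyword arguments recorded for the stub call; the
+    # routing header travels in its `metadata` entry.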
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry( + datacatalog.DeleteEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry( + datacatalog.DeleteEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetEntryRequest, + dict, +]) +def test_get_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. 
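+        # The enum fields set here (`type_`, `integrated_system`) are oneof
+        # members on Entry, so the generated assertions below only re-check
+        # the plain string fields.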
+ call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + client.get_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + +@pytest.mark.asyncio +async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_entry_async_from_dict(): + await test_get_entry_async(request_type=dict) + + +def test_get_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry( + datacatalog.GetEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry( + datacatalog.GetEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.LookupEntryRequest, + dict, +]) +def test_lookup_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_lookup_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + client.lookup_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + +@pytest.mark.asyncio +async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.LookupEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + fully_qualified_name='fully_qualified_name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.fully_qualified_name == 'fully_qualified_name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_lookup_entry_async_from_dict(): + await test_lookup_entry_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListEntriesRequest, + dict, +]) +def test_list_entries(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + client.list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + +@pytest.mark.asyncio +async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntriesRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + + +def test_list_entries_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = datacatalog.ListEntriesResponse() + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entries_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) + await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entries_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
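+        # The flattened ``parent`` keyword should have been copied onto the
+        # request message by the client, so the request proto captured as the
+        # first positional argument is inspected field by field.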
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entries_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entries( + datacatalog.ListEntriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_entries_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_entries_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_entries( + datacatalog.ListEntriesRequest(), + parent='parent_value', + ) + + +def test_list_entries_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entries(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datacatalog.Entry) + for i in results) +def test_list_entries_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Set the response to a series of pages. 
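+        # Each successive invocation of the mocked stub returns the next item
+        # from ``side_effect``; the trailing RuntimeError only fires if the
+        # pager requests more pages than the four responses supplied here.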
+ call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entries_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datacatalog.Entry) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entries_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.ModifyEntryOverviewRequest, + dict, +]) +def test_modify_entry_overview(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
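+    # ``request_type`` is parametrized above as both the proto message class
+    # and a plain dict, since the client accepts either form of request.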
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_overview), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryOverview( + overview='overview_value', + ) + response = client.modify_entry_overview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryOverviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryOverview) + assert response.overview == 'overview_value' + + +def test_modify_entry_overview_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_overview), + '__call__') as call: + client.modify_entry_overview() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryOverviewRequest() + +@pytest.mark.asyncio +async def test_modify_entry_overview_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ModifyEntryOverviewRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_overview), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryOverview( + overview='overview_value', + )) + response = await client.modify_entry_overview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryOverviewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryOverview) + assert response.overview == 'overview_value' + + +@pytest.mark.asyncio +async def test_modify_entry_overview_async_from_dict(): + await test_modify_entry_overview_async(request_type=dict) + + +def test_modify_entry_overview_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ModifyEntryOverviewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_overview), + '__call__') as call: + call.return_value = datacatalog.EntryOverview() + client.modify_entry_overview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
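+    # The client is expected to attach routing metadata of the form
+    # ('x-goog-request-params', 'name=name_value'), derived from the request's
+    # ``name`` field, alongside any caller-supplied metadata.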
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_modify_entry_overview_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ModifyEntryOverviewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_overview), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryOverview()) + await client.modify_entry_overview(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ModifyEntryContactsRequest, + dict, +]) +def test_modify_entry_contacts(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_contacts), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Contacts( + ) + response = client.modify_entry_contacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryContactsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Contacts) + + +def test_modify_entry_contacts_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_contacts), + '__call__') as call: + client.modify_entry_contacts() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryContactsRequest() + +@pytest.mark.asyncio +async def test_modify_entry_contacts_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ModifyEntryContactsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_contacts), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Contacts( + )) + response = await client.modify_entry_contacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ModifyEntryContactsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Contacts) + + +@pytest.mark.asyncio +async def test_modify_entry_contacts_async_from_dict(): + await test_modify_entry_contacts_async(request_type=dict) + + +def test_modify_entry_contacts_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ModifyEntryContactsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_contacts), + '__call__') as call: + call.return_value = datacatalog.Contacts() + client.modify_entry_contacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_modify_entry_contacts_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ModifyEntryContactsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_entry_contacts), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Contacts()) + await client.modify_entry_contacts(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagTemplateRequest, + dict, +]) +def test_create_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + ) + response = client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. 
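+        # Each entry in ``call.mock_calls`` is a (name, args, kwargs) triple;
+        # args[0] is the request proto that was handed to the stub.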
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +def test_create_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + client.create_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + +@pytest.mark.asyncio +async def test_create_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + )) + response = await client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +@pytest.mark.asyncio +async def test_create_tag_template_async_from_dict(): + await test_create_tag_template_async(request_type=dict) + + +def test_create_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tag_template( + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_id + mock_val = 'tag_template_id_value' + assert arg == mock_val + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + + +def test_create_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tag_template( + datacatalog.CreateTagTemplateRequest(), + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
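+        # Note that the second ``return_value`` assignment above supersedes the
+        # first, so awaiting the call below resolves the FakeUnaryUnaryCall to
+        # the TagTemplate message.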
+ response = await client.create_tag_template( + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_id + mock_val = 'tag_template_id_value' + assert arg == mock_val + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag_template( + datacatalog.CreateTagTemplateRequest(), + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetTagTemplateRequest, + dict, +]) +def test_get_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + ) + response = client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +def test_get_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + client.get_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + +@pytest.mark.asyncio +async def test_get_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
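+    # Patching ``__call__`` on the type of the transport's callable intercepts
+    # the invocation that the wrapped client method ultimately makes against
+    # the gRPC stub.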
+ with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + )) + response = await client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +@pytest.mark.asyncio +async def test_get_tag_template_async_from_dict(): + await test_get_tag_template_async(request_type=dict) + + +def test_get_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tag_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tag_template( + datacatalog.GetTagTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tag_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_tag_template( + datacatalog.GetTagTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagTemplateRequest, + dict, +]) +def test_update_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + ) + response = client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +def test_update_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + client.update_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + +@pytest.mark.asyncio +async def test_update_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + is_publicly_readable=True, + )) + response = await client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_publicly_readable is True + + +@pytest.mark.asyncio +async def test_update_tag_template_async_from_dict(): + await test_update_tag_template_async(request_type=dict) + + +def test_update_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateRequest() + + request.tag_template.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag_template.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateRequest() + + request.tag_template.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
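+    # For update RPCs the routing parameter is taken from the nested resource
+    # name, hence the expected value 'tag_template.name=name_value'.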
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag_template.name=name_value', + ) in kw['metadata'] + + +def test_update_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag_template( + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tag_template( + datacatalog.UpdateTagTemplateRequest(), + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag_template( + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_tag_template( + datacatalog.UpdateTagTemplateRequest(), + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteTagTemplateRequest, + dict, +]) +def test_delete_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + client.delete_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_template_async_from_dict(): + await test_delete_tag_template_async(request_type=dict) + + +def test_delete_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
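+    # DeleteTagTemplate returns google.protobuf.Empty, which the client
+    # surfaces as None, so the mocked stub simply returns None here.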
+ with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + call.return_value = None + client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag_template( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +def test_delete_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag_template( + datacatalog.DeleteTagTemplateRequest(), + name='name_value', + force=True, + ) + +@pytest.mark.asyncio +async def test_delete_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag_template( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag_template( + datacatalog.DeleteTagTemplateRequest(), + name='name_value', + force=True, + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagTemplateFieldRequest, + dict, +]) +def test_create_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_create_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + client.create_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_create_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_create_tag_template_field_async_from_dict(): + await test_create_tag_template_field_async(request_type=dict) + + +def test_create_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateFieldRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateFieldRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_tag_template_field( + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_field_id + mock_val = 'tag_template_field_id_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + + +def test_create_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tag_template_field( + datacatalog.CreateTagTemplateFieldRequest(), + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tag_template_field( + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_field_id + mock_val = 'tag_template_field_id_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag_template_field( + datacatalog.CreateTagTemplateFieldRequest(), + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagTemplateFieldRequest, + dict, +]) +def test_update_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_update_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + client.update_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_update_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_update_tag_template_field_async_from_dict(): + await test_update_tag_template_field_async(request_type=dict) + + +def test_update_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
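+ # Here the routing value comes from the request's name field; the test later
+ # checks that it shows up in the x-goog-request-params metadata entry.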
+ request = datacatalog.UpdateTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag_template_field( + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
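+ # Supplying both is ambiguous, so the client is expected to raise ValueError
+ # rather than merge the request object with the keyword arguments.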
+ with pytest.raises(ValueError): + client.update_tag_template_field( + datacatalog.UpdateTagTemplateFieldRequest(), + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag_template_field( + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag_template_field( + datacatalog.UpdateTagTemplateFieldRequest(), + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.RenameTagTemplateFieldRequest, + dict, +]) +def test_rename_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_rename_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + client.rename_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_rename_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_async_from_dict(): + await test_rename_tag_template_field_async(request_type=dict) + + +def test_rename_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_rename_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field( + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_tag_template_field_id + mock_val = 'new_tag_template_field_id_value' + assert arg == mock_val + + +def test_rename_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_tag_template_field( + datacatalog.RenameTagTemplateFieldRequest(), + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + +@pytest.mark.asyncio +async def test_rename_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_tag_template_field( + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_tag_template_field_id + mock_val = 'new_tag_template_field_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_rename_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_tag_template_field( + datacatalog.RenameTagTemplateFieldRequest(), + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.RenameTagTemplateFieldEnumValueRequest, + dict, +]) +def test_rename_tag_template_field_enum_value(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_rename_tag_template_field_enum_value_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + client.rename_tag_template_field_enum_value() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
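+ # Patching the transport method's __call__ intercepts the RPC at the gRPC
+ # stub boundary, so no network traffic occurs and the request the client
+ # built can be inspected through call.mock_calls.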
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async_from_dict(): + await test_rename_tag_template_field_enum_value_async(request_type=dict) + + +def test_rename_tag_template_field_enum_value_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_rename_tag_template_field_enum_value_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field_enum_value( + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_enum_value_display_name + mock_val = 'new_enum_value_display_name_value' + assert arg == mock_val + + +def test_rename_tag_template_field_enum_value_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_tag_template_field_enum_value( + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_enum_value_display_name + mock_val = 'new_enum_value_display_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteTagTemplateFieldRequest, + dict, +]) +def test_delete_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
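+ # The delete RPC carries no payload in its response, so None is the faked
+ # return value and the test only verifies that the client returns None.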
+ with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + client.delete_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_delete_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_async_from_dict(): + await test_delete_tag_template_field_async(request_type=dict) + + +def test_delete_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + call.return_value = None + client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag_template_field( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +def test_delete_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag_template_field( + datacatalog.DeleteTagTemplateFieldRequest(), + name='name_value', + force=True, + ) + +@pytest.mark.asyncio +async def test_delete_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag_template_field( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag_template_field( + datacatalog.DeleteTagTemplateFieldRequest(), + name='name_value', + force=True, + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagRequest, + dict, +]) +def test_create_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + column='column_value', + ) + response = client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +def test_create_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + client.create_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + +@pytest.mark.asyncio +async def test_create_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + )) + response = await client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. 
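+ # For the async surface the test only requires a non-empty mock_calls list;
+ # the first recorded call still carries the request that was sent.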
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +@pytest.mark.asyncio +async def test_create_tag_async_from_dict(): + await test_create_tag_async(request_type=dict) + + +def test_create_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + call.return_value = tags.Tag() + client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + await client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tag( + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + + +def test_create_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_tag( + datacatalog.CreateTagRequest(), + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tag( + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag( + datacatalog.CreateTagRequest(), + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagRequest, + dict, +]) +def test_update_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + column='column_value', + ) + response = client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +def test_update_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + client.update_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + +@pytest.mark.asyncio +async def test_update_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + )) + response = await client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +@pytest.mark.asyncio +async def test_update_tag_async_from_dict(): + await test_update_tag_async(request_type=dict) + + +def test_update_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagRequest() + + request.tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + call.return_value = tags.Tag() + client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagRequest() + + request.tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + await client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
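+ # UpdateTag routes on the nested tag.name field, so the expected routing
+ # string below is 'tag.name=name_value' rather than a top-level name.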
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag.name=name_value', + ) in kw['metadata'] + + +def test_update_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag( + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tag( + datacatalog.UpdateTagRequest(), + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag( + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag( + datacatalog.UpdateTagRequest(), + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteTagRequest, + dict, +]) +def test_delete_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
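+ # request_type is parametrized above as both datacatalog.DeleteTagRequest
+ # and dict, so the same flow is also exercised with a plain dict request.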
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + client.delete_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + +@pytest.mark.asyncio +async def test_delete_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_async_from_dict(): + await test_delete_tag_async(request_type=dict) + + +def test_delete_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + call.return_value = None + client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.DeleteTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag( + datacatalog.DeleteTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag( + datacatalog.DeleteTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListTagsRequest, + dict, +]) +def test_list_tags(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
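+    # client.transport.list_tags is the cached unary-unary multicallable for the
+    # ListTags RPC; patching __call__ on its type intercepts the outgoing request,
+    # so nothing is ever sent over the wire.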
+ with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTagsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + client.list_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + +@pytest.mark.asyncio +async def test_list_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListTagsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTagsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_tags_async_from_dict(): + await test_list_tags_async(request_type=dict) + + +def test_list_tags_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + call.return_value = datacatalog.ListTagsResponse() + client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
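+    # kw['metadata'] holds the gRPC metadata attached to the call; the generated
+    # client adds an x-goog-request-params entry built from the URI-embedded
+    # fields (here parent=parent_value) so the backend can route the request.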
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tags_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) + await client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_tags_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_tags_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tags( + datacatalog.ListTagsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_tags_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_tags_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
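+    # A caller passes either a fully-formed request message or the flattened
+    # keyword arguments, never both. Illustrative only (the resource name is
+    # made up):
+    #
+    #     parent = 'projects/p/locations/us/entryGroups/g/entries/e'
+    #     await client.list_tags(request=datacatalog.ListTagsRequest(parent=parent))
+    #     await client.list_tags(parent=parent)
+    #
+    # Mixing the two forms raises ValueError, as asserted here.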
+    with pytest.raises(ValueError):
+        await client.list_tags(
+            datacatalog.ListTagsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_tags_pager(transport_name: str = "grpc"):
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tags),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+                next_page_token='abc',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[],
+                next_page_token='def',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                ],
+                next_page_token='ghi',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_tags(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, tags.Tag)
+                   for i in results)
+
+
+def test_list_tags_pages(transport_name: str = "grpc"):
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tags),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+                next_page_token='abc',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[],
+                next_page_token='def',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                ],
+                next_page_token='ghi',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_tags(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_tags_async_pager():
+    client = DataCatalogAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_tags),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+                next_page_token='abc',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[],
+                next_page_token='def',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                ],
+                next_page_token='ghi',
+            ),
+            datacatalog.ListTagsResponse(
+                tags=[
+                    tags.Tag(),
+                    tags.Tag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_tags(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, tags.Tag)
+                for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_tags_async_pages():
+    client = DataCatalogAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_tags), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + tags.Tag(), + ], + next_page_token='abc', + ), + datacatalog.ListTagsResponse( + tags=[], + next_page_token='def', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + ], + next_page_token='ghi', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.ReconcileTagsRequest, + dict, +]) +def test_reconcile_tags(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reconcile_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.reconcile_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ReconcileTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_reconcile_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reconcile_tags), + '__call__') as call: + client.reconcile_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ReconcileTagsRequest() + +@pytest.mark.asyncio +async def test_reconcile_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ReconcileTagsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reconcile_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.reconcile_tags(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ReconcileTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reconcile_tags_async_from_dict(): + await test_reconcile_tags_async(request_type=dict) + + +def test_reconcile_tags_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ReconcileTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reconcile_tags), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.reconcile_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reconcile_tags_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ReconcileTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reconcile_tags), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.reconcile_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + datacatalog.StarEntryRequest, + dict, +]) +def test_star_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.StarEntryResponse( + ) + response = client.star_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.StarEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.StarEntryResponse) + + +def test_star_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
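+    # With no request object and no flattened fields, the client falls back to a
+    # default-constructed StarEntryRequest(), which is what the assertion below
+    # expects.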
+ client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + client.star_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.StarEntryRequest() + +@pytest.mark.asyncio +async def test_star_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.StarEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse( + )) + response = await client.star_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.StarEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.StarEntryResponse) + + +@pytest.mark.asyncio +async def test_star_entry_async_from_dict(): + await test_star_entry_async(request_type=dict) + + +def test_star_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.StarEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + call.return_value = datacatalog.StarEntryResponse() + client.star_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_star_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.StarEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse()) + await client.star_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_star_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.StarEntryResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.star_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_star_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.star_entry( + datacatalog.StarEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_star_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.star_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.StarEntryResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.star_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_star_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.star_entry( + datacatalog.StarEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UnstarEntryRequest, + dict, +]) +def test_unstar_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.UnstarEntryResponse( + ) + response = client.unstar_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UnstarEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.UnstarEntryResponse) + + +def test_unstar_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + client.unstar_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UnstarEntryRequest() + +@pytest.mark.asyncio +async def test_unstar_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UnstarEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse( + )) + response = await client.unstar_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UnstarEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.UnstarEntryResponse) + + +@pytest.mark.asyncio +async def test_unstar_entry_async_from_dict(): + await test_unstar_entry_async(request_type=dict) + + +def test_unstar_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UnstarEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + call.return_value = datacatalog.UnstarEntryResponse() + client.unstar_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_unstar_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UnstarEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
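+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response in an
+    # awaitable that behaves like a unary-unary gRPC call, which is what lets the
+    # async client await the mocked transport below.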
+ with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse()) + await client.unstar_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_unstar_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.UnstarEntryResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.unstar_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_unstar_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.unstar_entry( + datacatalog.UnstarEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_unstar_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.unstar_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.UnstarEntryResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.unstar_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_unstar_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.unstar_entry( + datacatalog.UnstarEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
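+    # The IAM methods reuse the well-known google.iam.v1 protos (iam_policy_pb2
+    # requests, policy_pb2.Policy responses) rather than Data Catalog-specific
+    # message types.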
+ with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
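+        # The flattened resource keyword is folded into a GetIamPolicyRequest
+        # before the transport is invoked, so the request captured by the mock
+        # exposes it as args[0].resource.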
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ImportEntriesRequest, + dict, +]) +def test_import_entries(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.import_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ImportEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_entries), + '__call__') as call: + client.import_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ImportEntriesRequest() + +@pytest.mark.asyncio +async def test_import_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ImportEntriesRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ImportEntriesRequest() + + # Establish that the response is the type that we expect. 
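+    # ImportEntries is a long-running operation: the transport returns a raw
+    # operations_pb2.Operation and the client wraps it in an operation future.
+    # The test stops short of polling result(), which would need the operations
+    # service.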
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_entries_async_from_dict(): + await test_import_entries_async(request_type=dict) + + +def test_import_entries_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ImportEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_entries), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_entries_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ImportEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_entries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.import_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataCatalogClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataCatalogGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = DataCatalogClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataCatalogGrpcTransport, + ) + +def test_data_catalog_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataCatalogTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_data_catalog_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataCatalogTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
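+    # (The base transport only declares the interface; concrete transports such
+    # as DataCatalogGrpcTransport and DataCatalogGrpcAsyncIOTransport are
+    # expected to override each of these stubs.)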
+ methods = ( + 'search_catalog', + 'create_entry_group', + 'get_entry_group', + 'update_entry_group', + 'delete_entry_group', + 'list_entry_groups', + 'create_entry', + 'update_entry', + 'delete_entry', + 'get_entry', + 'lookup_entry', + 'list_entries', + 'modify_entry_overview', + 'modify_entry_contacts', + 'create_tag_template', + 'get_tag_template', + 'update_tag_template', + 'delete_tag_template', + 'create_tag_template_field', + 'update_tag_template_field', + 'rename_tag_template_field', + 'rename_tag_template_field_enum_value', + 'delete_tag_template_field', + 'create_tag', + 'update_tag', + 'delete_tag', + 'list_tags', + 'reconcile_tags', + 'star_entry', + 'unstar_entry', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'import_entries', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_catalog_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_data_catalog_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport() + adc.assert_called_once() + + +def test_data_catalog_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataCatalogClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, + ], +) +def test_data_catalog_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, + ], +) +def test_data_catalog_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
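+    # (client_cert_source_for_mtls is a callback returning a
+    # (cert_bytes, key_bytes) pair; the transport is expected to hand that pair
+    # to grpc.ssl_channel_credentials, which the assertion on mock_ssl_cred
+    # verifies.)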
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_catalog_host_no_port(transport_name): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_catalog_host_with_port(transport_name): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:8000' + ) + +def test_data_catalog_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataCatalogGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_catalog_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataCatalogGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_catalog_grpc_lro_client(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_data_catalog_grpc_lro_async_client(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_entry_path(): + project = "squid" + location = "clam" + entry_group = "whelk" + entry = "octopus" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) + actual = DataCatalogClient.entry_path(project, location, entry_group, entry) + assert expected == actual + + +def test_parse_entry_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "entry_group": "cuttlefish", + "entry": "mussel", + } + path = DataCatalogClient.entry_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_entry_path(path) + assert expected == actual + +def test_entry_group_path(): + project = "winkle" + location = "nautilus" + entry_group = "scallop" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + actual = DataCatalogClient.entry_group_path(project, location, entry_group) + assert expected == actual + + +def test_parse_entry_group_path(): + expected = { + "project": "abalone", + "location": "squid", + "entry_group": "clam", + } + path = DataCatalogClient.entry_group_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_entry_group_path(path) + assert expected == actual + +def test_tag_path(): + project = "whelk" + location = "octopus" + entry_group = "oyster" + entry = "nudibranch" + tag = "cuttlefish" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) + actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) + assert expected == actual + + +def test_parse_tag_path(): + expected = { + "project": "mussel", + "location": "winkle", + "entry_group": "nautilus", + "entry": "scallop", + "tag": "abalone", + } + path = DataCatalogClient.tag_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_path(path) + assert expected == actual + +def test_tag_template_path(): + project = "squid" + location = "clam" + tag_template = "whelk" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) + actual = DataCatalogClient.tag_template_path(project, location, tag_template) + assert expected == actual + + +def test_parse_tag_template_path(): + expected = { + "project": "octopus", + "location": "oyster", + "tag_template": "nudibranch", + } + path = DataCatalogClient.tag_template_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_tag_template_path(path) + assert expected == actual + +def test_tag_template_field_path(): + project = "cuttlefish" + location = "mussel" + tag_template = "winkle" + field = "nautilus" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) + actual = DataCatalogClient.tag_template_field_path(project, location, tag_template, field) + assert expected == actual + + +def test_parse_tag_template_field_path(): + expected = { + "project": "scallop", + "location": "abalone", + "tag_template": "squid", + "field": "clam", + } + path = DataCatalogClient.tag_template_field_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_field_path(path) + assert expected == actual + +def test_tag_template_field_enum_value_path(): + project = "whelk" + location = "octopus" + tag_template = "oyster" + tag_template_field_id = "nudibranch" + enum_value_display_name = "cuttlefish" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) + actual = DataCatalogClient.tag_template_field_enum_value_path(project, location, tag_template, tag_template_field_id, enum_value_display_name) + assert expected == actual + + +def test_parse_tag_template_field_enum_value_path(): + expected = { + "project": "mussel", + "location": "winkle", + "tag_template": "nautilus", + "tag_template_field_id": "scallop", + "enum_value_display_name": "abalone", + } + path = DataCatalogClient.tag_template_field_enum_value_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataCatalogClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DataCatalogClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataCatalogClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DataCatalogClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataCatalogClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DataCatalogClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = DataCatalogClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DataCatalogClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataCatalogClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DataCatalogClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: + transport_class = DataCatalogClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
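+        # (Entering the client as a context manager should leave the transport
+        # open; exiting should close it exactly once, as asserted below.)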
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py new file mode 100644 index 000000000000..1e3777f4c4fc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -0,0 +1,5041 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager import PolicyTagManagerAsyncClient +from google.cloud.datacatalog_v1.services.policy_tag_manager import PolicyTagManagerClient +from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1.services.policy_tag_manager import transports +from google.cloud.datacatalog_v1.types import common +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None + assert PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerClient, "grpc"), + (PolicyTagManagerAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.PolicyTagManagerGrpcTransport, "grpc"), + (transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerClient, "grpc"), + (PolicyTagManagerAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +def test_policy_tag_manager_client_get_transport_class(): + transport = PolicyTagManagerClient.get_transport_class() + 
available_transports = [ + transports.PolicyTagManagerGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +def test_policy_tag_manager_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "true"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "false"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
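+    # When GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" the client is expected to
+    # switch to the mTLS endpoint and forward the cert source to the transport;
+    # when "false" it should keep the plain endpoint and pass no cert source.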
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PolicyTagManagerClient, PolicyTagManagerAsyncClient +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +def test_policy_tag_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
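+    # An explicitly configured api_endpoint and client_cert_source should be
+    # returned unchanged when client certificates are enabled.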
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
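+    # User-supplied OAuth scopes are expected to be forwarded verbatim to the
+    # transport constructor.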
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_policy_tag_manager_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
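+    # create_channel should be invoked with the credentials loaded from the
+    # file, the default cloud-platform scope, and the datacatalog host.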
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.CreateTaxonomyRequest, + dict, +]) +def test_create_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_create_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + client.create_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + +@pytest.mark.asyncio +async def test_create_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreateTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
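+    # The async client mirrors the sync test above; the mocked response is
+    # wrapped in FakeUnaryUnaryCall so it can be awaited.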
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_create_taxonomy_async_from_dict(): + await test_create_taxonomy_async(request_type=dict) + + +def test_create_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
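+    # Flattened keyword arguments (parent, taxonomy) are expected to be folded
+    # into a single CreateTaxonomyRequest before the RPC is invoked.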
+ with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_taxonomy( + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + + +def test_create_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_taxonomy( + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.DeleteTaxonomyRequest, + dict, +]) +def test_delete_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. 
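+        # DeleteTaxonomy carries no response payload, so the mocked call returns
+        # None and the client is expected to surface None as well.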
+ call.return_value = None + response = client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + client.delete_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + +@pytest.mark.asyncio +async def test_delete_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeleteTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async_from_dict(): + await test_delete_taxonomy_async(request_type=dict) + + +def test_delete_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + call.return_value = None + client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
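+    # The request's name field should be propagated as the
+    # x-goog-request-params routing header on the outgoing metadata.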
+ with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.UpdateTaxonomyRequest, + dict, +]) +def test_update_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_update_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + client.update_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + +@pytest.mark.asyncio +async def test_update_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdateTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_update_taxonomy_async_from_dict(): + await test_update_taxonomy_async(request_type=dict) + + +def test_update_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'taxonomy.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'taxonomy.name=name_value', + ) in kw['metadata'] + + +def test_update_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + + +def test_update_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
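+    # Mixing the two calling styles is ambiguous, so a ValueError is expected.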
+ with pytest.raises(ValueError): + client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.ListTaxonomiesRequest, + dict, +]) +def test_list_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
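+    # Calling list_taxonomies() with no arguments should still send a default
+    # ListTaxonomiesRequest to the stub.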
+ with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + client.list_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_list_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListTaxonomiesRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_from_dict(): + await test_list_taxonomies_async(request_type=dict) + + +def test_list_taxonomies_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + call.return_value = policytagmanager.ListTaxonomiesResponse() + client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_taxonomies_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) + await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_taxonomies_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_taxonomies( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_taxonomies_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_taxonomies( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), + parent='parent_value', + ) + + +def test_list_taxonomies_pager(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Set the response to a series of pages. 
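+        # Four fake pages are queued (3 + 0 + 1 + 2 taxonomies); the pager is
+        # expected to iterate across all of them transparently, yielding six items.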
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_taxonomies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) + for i in results) +def test_list_taxonomies_pages(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_taxonomies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pager(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
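+        # Same page layout as the sync pager test above; here the pages are
+        # consumed with `async for` via the AsyncPager.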
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_taxonomies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pages(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_taxonomies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + policytagmanager.GetTaxonomyRequest, + dict, +]) +def test_get_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_get_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + client.get_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + +@pytest.mark.asyncio +async def test_get_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_get_taxonomy_async_from_dict(): + await test_get_taxonomy_async(request_type=dict) + + +def test_get_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.CreatePolicyTagRequest, + dict, +]) +def test_create_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_create_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + client.create_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + +@pytest.mark.asyncio +async def test_create_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreatePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. 
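+        # (Only the presence of a recorded call is asserted here; the return
+        # value above was wrapped in grpc_helpers_async.FakeUnaryUnaryCall so
+        # that awaiting the mocked stub resolves to the PolicyTag.)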
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_create_policy_tag_async_from_dict(): + await test_create_policy_tag_async(request_type=dict) + + +def test_create_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_policy_tag( + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + + +def test_create_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_policy_tag( + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.DeletePolicyTagRequest, + dict, +]) +def test_delete_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
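+    # With no request and no flattened arguments, the client should fall back
+    # to sending a default DeletePolicyTagRequest(), which is what the final
+    # assertion in this test checks.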
+ client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + client.delete_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + +@pytest.mark.asyncio +async def test_delete_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeletePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async_from_dict(): + await test_delete_policy_tag_async(request_type=dict) + + +def test_delete_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + call.return_value = None + client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.UpdatePolicyTagRequest, + dict, +]) +def test_update_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_update_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + client.update_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + +@pytest.mark.asyncio +async def test_update_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdatePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_update_policy_tag_async_from_dict(): + await test_update_policy_tag_async(request_type=dict) + + +def test_update_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'policy_tag.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'policy_tag.name=name_value', + ) in kw['metadata'] + + +def test_update_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + + +def test_update_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.ListPolicyTagsRequest, + dict, +]) +def test_list_policy_tags(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_policy_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + client.list_policy_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + +@pytest.mark.asyncio +async def test_list_policy_tags_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListPolicyTagsRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_from_dict(): + await test_list_policy_tags_async(request_type=dict) + + +def test_list_policy_tags_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + call.return_value = policytagmanager.ListPolicyTagsResponse() + client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_policy_tags_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) + await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_policy_tags_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_policy_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
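+        # (The flattened `parent` keyword should be folded into a
+        # ListPolicyTagsRequest, so the request object recorded on the mock is
+        # expected to carry parent='parent_value'.)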
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    arg = args[0].parent
+    mock_val = 'parent_value'
+    assert arg == mock_val
+
+
+def test_list_policy_tags_flattened_error():
+    client = PolicyTagManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_policy_tags(
+            policytagmanager.ListPolicyTagsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_flattened_async():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_policy_tags),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_policy_tags(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_flattened_error_async():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_policy_tags(
+            policytagmanager.ListPolicyTagsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_policy_tags_pager(transport_name: str = "grpc"):
+    client = PolicyTagManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_policy_tags),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='abc',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[],
+                next_page_token='def',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='ghi',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_policy_tags(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, policytagmanager.PolicyTag)
+                   for i in results)
+
+
+def test_list_policy_tags_pages(transport_name: str = "grpc"):
+    client = PolicyTagManagerClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_policy_tags),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='abc',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[],
+                next_page_token='def',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='ghi',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_policy_tags(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_async_pager():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_policy_tags),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='abc',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[],
+                next_page_token='def',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                ],
+                next_page_token='ghi',
+            ),
+            policytagmanager.ListPolicyTagsResponse(
+                policy_tags=[
+                    policytagmanager.PolicyTag(),
+                    policytagmanager.PolicyTag(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_policy_tags(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, policytagmanager.PolicyTag)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_policy_tags_async_pages():
+    client = PolicyTagManagerAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_policy_tags),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
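+        # The pager is expected to re-invoke the RPC once per page until it
+        # sees an empty next_page_token, so the side_effect below supplies one
+        # response per expected page; the trailing RuntimeError acts as a
+        # guard that would only surface on an extra, unexpected call.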
+ call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token='abc', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], + next_page_token='def', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + ], + next_page_token='ghi', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_policy_tags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + policytagmanager.GetPolicyTagRequest, + dict, +]) +def test_get_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_get_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + client.get_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + +@pytest.mark.asyncio +async def test_get_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetPolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_get_policy_tag_async_from_dict(): + await test_get_policy_tag_async(request_type=dict) + + +def test_get_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
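+    # For the IAM mix-in methods the routing key is the request's `resource`
+    # field, so the expected header value is 'resource=resource_value'.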
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = PolicyTagManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PolicyTagManagerGrpcTransport, + ) + +def test_policy_tag_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_policy_tag_manager_base_transport(): + # Instantiate the base transport. 
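+    # Patching __init__ lets the abstract base transport be instantiated
+    # directly; every RPC method on it is expected to raise NotImplementedError
+    # until a concrete transport overrides it.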
+ with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_taxonomy', + 'delete_taxonomy', + 'update_taxonomy', + 'list_taxonomies', + 'get_taxonomy', + 'create_policy_tag', + 'delete_policy_tag', + 'update_policy_tag', + 'list_policy_tags', + 'get_policy_tag', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_policy_tag_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
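+    # google.auth.default() returns a (credentials, project_id) tuple, so the
+    # mock below mirrors that shape.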
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
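+    # The callback yields a (certificate, key) pair, and grpc.ssl_channel_credentials
+    # should be invoked with exactly those bytes.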
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_host_no_port(transport_name): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_host_with_port(transport_name): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:8000' + ) + +def test_policy_tag_manager_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_policy_tag_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + policy_tag = "octopus" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) + actual = PolicyTagManagerClient.policy_tag_path(project, location, taxonomy, policy_tag) + assert expected == actual + + +def test_parse_policy_tag_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "taxonomy": "cuttlefish", + "policy_tag": "mussel", + } + path = PolicyTagManagerClient.policy_tag_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_policy_tag_path(path) + assert expected == actual + +def test_taxonomy_path(): + project = "winkle" + location = "nautilus" + taxonomy = "scallop" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "abalone", + "location": "squid", + "taxonomy": "clam", + } + path = PolicyTagManagerClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PolicyTagManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PolicyTagManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = PolicyTagManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PolicyTagManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PolicyTagManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PolicyTagManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = PolicyTagManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PolicyTagManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PolicyTagManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PolicyTagManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: + transport_class = PolicyTagManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
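+    # CancelOperation returns google.protobuf.Empty, which the client surfaces
+    # as None.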
+ assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py new file mode 100644 index 000000000000..c4e33ec77510 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -0,0 +1,2144 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient +from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import transports +from google.cloud.datacatalog_v1.types import policytagmanager +from google.cloud.datacatalog_v1.types import policytagmanagerserialization +from google.cloud.datacatalog_v1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerSerializationClient, "grpc"), + (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), + (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerSerializationClient, "grpc"), + (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + 
+    assert client.transport._host == (
+        'datacatalog.googleapis.com:443'
+    )
+
+
+def test_policy_tag_manager_serialization_client_get_transport_class():
+    transport = PolicyTagManagerSerializationClient.get_transport_class()
+    available_transports = [
+        transports.PolicyTagManagerSerializationGrpcTransport,
+    ]
+    assert transport in available_transports
+
+    transport = PolicyTagManagerSerializationClient.get_transport_class("grpc")
+    assert transport == transports.PolicyTagManagerSerializationGrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"),
+    (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"),
+])
+@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient))
+@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient))
+def test_policy_tag_manager_serialization_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "true"), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "false"), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) +@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_serialization_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
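+    # The parametrization above exercises both the sync and asyncio transports,
+    # each with GOOGLE_API_USE_CLIENT_CERTIFICATE set to "true" and to "false".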
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient +]) +@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) +@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) +def test_policy_tag_manager_serialization_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_serialization_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_policy_tag_manager_serialization_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerSerializationClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_serialization_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanagerserialization.ReplaceTaxonomyRequest, + dict, +]) +def test_replace_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.replace_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_replace_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.replace_taxonomy), + '__call__') as call: + client.replace_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() + +@pytest.mark.asyncio +async def test_replace_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ReplaceTaxonomyRequest): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.replace_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_replace_taxonomy_async_from_dict(): + await test_replace_taxonomy_async(request_type=dict) + + +def test_replace_taxonomy_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ReplaceTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.replace_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.replace_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_replace_taxonomy_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ReplaceTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.replace_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.replace_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + policytagmanagerserialization.ImportTaxonomiesRequest, + dict, +]) +def test_import_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse( + ) + response = client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +def test_import_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + client.import_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_import_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ImportTaxonomiesRequest): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse( + )) + response = await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_import_taxonomies_async_from_dict(): + await test_import_taxonomies_async(request_type=dict) + + +def test_import_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse()) + await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + policytagmanagerserialization.ExportTaxonomiesRequest, + dict, +]) +def test_export_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse( + ) + response = client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +def test_export_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + client.export_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_export_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ExportTaxonomiesRequest): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse( + )) + response = await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_export_taxonomies_async_from_dict(): + await test_export_taxonomies_async(request_type=dict) + + +def test_export_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_export_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse()) + await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerSerializationClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = PolicyTagManagerSerializationClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PolicyTagManagerSerializationGrpcTransport, + ) + +def test_policy_tag_manager_serialization_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_policy_tag_manager_serialization_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'replace_taxonomy', + 'import_taxonomies', + 'export_taxonomies', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_serialization_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_serialization_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_policy_tag_manager_serialization_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_serialization_host_no_port(transport_name): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_serialization_host_with_port(transport_name): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:8000' + ) + +def test_policy_tag_manager_serialization_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_taxonomy_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + actual = PolicyTagManagerSerializationClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", + } + path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PolicyTagManagerSerializationClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = PolicyTagManagerSerializationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = PolicyTagManagerSerializationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PolicyTagManagerSerializationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = PolicyTagManagerSerializationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = PolicyTagManagerSerializationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = PolicyTagManagerSerializationClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PolicyTagManagerSerializationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = PolicyTagManagerSerializationClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: + transport_class = PolicyTagManagerSerializationClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc new file mode 100644 index 000000000000..8d9d83e17533 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/datacatalog/__init__.py + google/cloud/datacatalog/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in new file mode 100644 index 000000000000..0e9fef34a3a9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/datacatalog *.py +recursive-include google/cloud/datacatalog_v1beta1 *.py
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst new file mode 100644 index 000000000000..8f53b24416fc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Datacatalog API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Datacatalog API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css new file mode 100644 index 000000000000..06423be0b592 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +}
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py new file mode 100644 index 000000000000..aec9c23130e4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +# +# google-cloud-datacatalog documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-datacatalog" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. 
+# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +
+# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None +
+# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served.
+# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-datacatalog-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-datacatalog.tex", + u"google-cloud-datacatalog Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-datacatalog", + u"Google Cloud Datacatalog Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-datacatalog", + u"google-cloud-datacatalog Documentation", + author, + "google-cloud-datacatalog", + "GAPIC library for Google Cloud Datacatalog API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. 
+# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst new file mode 100644 index 000000000000..82ca26f399dc --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst @@ -0,0 +1,10 @@ +DataCatalog +----------------------------- + +.. automodule:: google.cloud.datacatalog_v1beta1.services.data_catalog + :members: + :inherited-members: + +.. automodule:: google.cloud.datacatalog_v1beta1.services.data_catalog.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst new file mode 100644 index 000000000000..8971945c327f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst @@ -0,0 +1,10 @@ +PolicyTagManager +---------------------------------- + +.. automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager + :members: + :inherited-members: + +.. automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst new file mode 100644 index 000000000000..aed4c56cde06 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst @@ -0,0 +1,6 @@ +PolicyTagManagerSerialization +----------------------------------------------- + +.. 
automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization + :members: + :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst new file mode 100644 index 000000000000..4f762e1c60d4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Datacatalog v1beta1 API +================================================= +.. toctree:: + :maxdepth: 2 + + data_catalog + policy_tag_manager + policy_tag_manager_serialization diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst new file mode 100644 index 000000000000..a1baedafba34 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Datacatalog v1beta1 API +============================================== + +.. automodule:: google.cloud.datacatalog_v1beta1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst new file mode 100644 index 000000000000..ae7dac5f96ff --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + datacatalog_v1beta1/services + datacatalog_v1beta1/types diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py new file mode 100644 index 000000000000..822441d552a0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.datacatalog import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.datacatalog_v1beta1.services.data_catalog.client import DataCatalogClient +from google.cloud.datacatalog_v1beta1.services.data_catalog.async_client import DataCatalogAsyncClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.client import PolicyTagManagerClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.async_client import PolicyTagManagerAsyncClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.client import PolicyTagManagerSerializationClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.async_client import PolicyTagManagerSerializationAsyncClient + +from google.cloud.datacatalog_v1beta1.types.common import IntegratedSystem +from google.cloud.datacatalog_v1beta1.types.common import ManagingSystem +from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryGroupRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagTemplateFieldRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagTemplateRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteEntryGroupRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteEntryRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagTemplateFieldRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagTemplateRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import Entry +from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryGroup +from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryGroupRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import GetTagTemplateRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntriesRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntriesResponse +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntryGroupsRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntryGroupsResponse +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListTagsRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import ListTagsResponse +from google.cloud.datacatalog_v1beta1.types.datacatalog import LookupEntryRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import RenameTagTemplateFieldEnumValueRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import RenameTagTemplateFieldRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import SearchCatalogRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import SearchCatalogResponse +from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateEntryGroupRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateEntryRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagTemplateFieldRequest +from 
google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagTemplateRequest +from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryType +from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFilesetSpec +from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFileSpec +from google.cloud.datacatalog_v1beta1.types.policytagmanager import CreatePolicyTagRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import CreateTaxonomyRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import DeletePolicyTagRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import DeleteTaxonomyRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import GetPolicyTagRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import GetTaxonomyRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListPolicyTagsRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListPolicyTagsResponse +from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListTaxonomiesRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListTaxonomiesResponse +from google.cloud.datacatalog_v1beta1.types.policytagmanager import PolicyTag +from google.cloud.datacatalog_v1beta1.types.policytagmanager import Taxonomy +from google.cloud.datacatalog_v1beta1.types.policytagmanager import UpdatePolicyTagRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanager import UpdateTaxonomyRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ExportTaxonomiesRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ExportTaxonomiesResponse +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ImportTaxonomiesRequest +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ImportTaxonomiesResponse +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import InlineSource +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import SerializedPolicyTag +from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import SerializedTaxonomy +from google.cloud.datacatalog_v1beta1.types.schema import ColumnSchema +from google.cloud.datacatalog_v1beta1.types.schema import Schema +from google.cloud.datacatalog_v1beta1.types.search import SearchCatalogResult +from google.cloud.datacatalog_v1beta1.types.search import SearchResultType +from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryDateShardedSpec +from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryTableSpec +from google.cloud.datacatalog_v1beta1.types.table_spec import TableSpec +from google.cloud.datacatalog_v1beta1.types.table_spec import ViewSpec +from google.cloud.datacatalog_v1beta1.types.table_spec import TableSourceType +from google.cloud.datacatalog_v1beta1.types.tags import FieldType +from google.cloud.datacatalog_v1beta1.types.tags import Tag +from google.cloud.datacatalog_v1beta1.types.tags import TagField +from google.cloud.datacatalog_v1beta1.types.tags import TagTemplate +from google.cloud.datacatalog_v1beta1.types.tags import TagTemplateField +from google.cloud.datacatalog_v1beta1.types.timestamps import SystemTimestamps +from google.cloud.datacatalog_v1beta1.types.usage import UsageSignal +from google.cloud.datacatalog_v1beta1.types.usage import UsageStats + +__all__ = 
('DataCatalogClient', + 'DataCatalogAsyncClient', + 'PolicyTagManagerClient', + 'PolicyTagManagerAsyncClient', + 'PolicyTagManagerSerializationClient', + 'PolicyTagManagerSerializationAsyncClient', + 'IntegratedSystem', + 'ManagingSystem', + 'CreateEntryGroupRequest', + 'CreateEntryRequest', + 'CreateTagRequest', + 'CreateTagTemplateFieldRequest', + 'CreateTagTemplateRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteTagRequest', + 'DeleteTagTemplateFieldRequest', + 'DeleteTagTemplateRequest', + 'Entry', + 'EntryGroup', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetTagTemplateRequest', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListTagsRequest', + 'ListTagsResponse', + 'LookupEntryRequest', + 'RenameTagTemplateFieldEnumValueRequest', + 'RenameTagTemplateFieldRequest', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateTagRequest', + 'UpdateTagTemplateFieldRequest', + 'UpdateTagTemplateRequest', + 'EntryType', + 'GcsFilesetSpec', + 'GcsFileSpec', + 'CreatePolicyTagRequest', + 'CreateTaxonomyRequest', + 'DeletePolicyTagRequest', + 'DeleteTaxonomyRequest', + 'GetPolicyTagRequest', + 'GetTaxonomyRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'PolicyTag', + 'Taxonomy', + 'UpdatePolicyTagRequest', + 'UpdateTaxonomyRequest', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + 'ImportTaxonomiesRequest', + 'ImportTaxonomiesResponse', + 'InlineSource', + 'SerializedPolicyTag', + 'SerializedTaxonomy', + 'ColumnSchema', + 'Schema', + 'SearchCatalogResult', + 'SearchResultType', + 'BigQueryDateShardedSpec', + 'BigQueryTableSpec', + 'TableSpec', + 'ViewSpec', + 'TableSourceType', + 'FieldType', + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'SystemTimestamps', + 'UsageSignal', + 'UsageStats', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py new file mode 100644 index 000000000000..360a0d13ebdd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed new file mode 100644 index 000000000000..bb4088a3c198 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datacatalog package uses inline types. 
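A minimal usage sketch of the surface re-exported by the google.cloud.datacatalog package above (not part of the generated patch). It assumes Application Default Credentials are configured in the environment; the project, dataset, and table names are placeholders introduced here for illustration only:

    from google.cloud import datacatalog

    # Instantiate the v1beta1 DataCatalogClient re-exported by the package above;
    # with no arguments it picks up Application Default Credentials.
    client = datacatalog.DataCatalogClient()

    # Look up the catalog entry for an existing BigQuery table.
    # The linked_resource value is a placeholder, not a resource from this patch.
    entry = client.lookup_entry(
        request={
            "linked_resource": (
                "//bigquery.googleapis.com/projects/my-project"
                "/datasets/my_dataset/tables/my_table"
            )
        }
    )
    print(entry.name)

The same names are also importable from the versioned google.cloud.datacatalog_v1beta1 package introduced just below.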
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py new file mode 100644 index 000000000000..e564e56f1fb5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.data_catalog import DataCatalogClient +from .services.data_catalog import DataCatalogAsyncClient +from .services.policy_tag_manager import PolicyTagManagerClient +from .services.policy_tag_manager import PolicyTagManagerAsyncClient +from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient +from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient + +from .types.common import IntegratedSystem +from .types.common import ManagingSystem +from .types.datacatalog import CreateEntryGroupRequest +from .types.datacatalog import CreateEntryRequest +from .types.datacatalog import CreateTagRequest +from .types.datacatalog import CreateTagTemplateFieldRequest +from .types.datacatalog import CreateTagTemplateRequest +from .types.datacatalog import DeleteEntryGroupRequest +from .types.datacatalog import DeleteEntryRequest +from .types.datacatalog import DeleteTagRequest +from .types.datacatalog import DeleteTagTemplateFieldRequest +from .types.datacatalog import DeleteTagTemplateRequest +from .types.datacatalog import Entry +from .types.datacatalog import EntryGroup +from .types.datacatalog import GetEntryGroupRequest +from .types.datacatalog import GetEntryRequest +from .types.datacatalog import GetTagTemplateRequest +from .types.datacatalog import ListEntriesRequest +from .types.datacatalog import ListEntriesResponse +from .types.datacatalog import ListEntryGroupsRequest +from .types.datacatalog import ListEntryGroupsResponse +from .types.datacatalog import ListTagsRequest +from .types.datacatalog import ListTagsResponse +from .types.datacatalog import LookupEntryRequest +from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest +from .types.datacatalog import RenameTagTemplateFieldRequest +from .types.datacatalog import SearchCatalogRequest +from .types.datacatalog import SearchCatalogResponse +from .types.datacatalog import UpdateEntryGroupRequest +from .types.datacatalog import UpdateEntryRequest +from .types.datacatalog import UpdateTagRequest +from .types.datacatalog import UpdateTagTemplateFieldRequest +from .types.datacatalog import UpdateTagTemplateRequest +from .types.datacatalog import EntryType +from .types.gcs_fileset_spec import GcsFilesetSpec +from .types.gcs_fileset_spec import GcsFileSpec +from .types.policytagmanager import CreatePolicyTagRequest +from .types.policytagmanager 
import CreateTaxonomyRequest +from .types.policytagmanager import DeletePolicyTagRequest +from .types.policytagmanager import DeleteTaxonomyRequest +from .types.policytagmanager import GetPolicyTagRequest +from .types.policytagmanager import GetTaxonomyRequest +from .types.policytagmanager import ListPolicyTagsRequest +from .types.policytagmanager import ListPolicyTagsResponse +from .types.policytagmanager import ListTaxonomiesRequest +from .types.policytagmanager import ListTaxonomiesResponse +from .types.policytagmanager import PolicyTag +from .types.policytagmanager import Taxonomy +from .types.policytagmanager import UpdatePolicyTagRequest +from .types.policytagmanager import UpdateTaxonomyRequest +from .types.policytagmanagerserialization import ExportTaxonomiesRequest +from .types.policytagmanagerserialization import ExportTaxonomiesResponse +from .types.policytagmanagerserialization import ImportTaxonomiesRequest +from .types.policytagmanagerserialization import ImportTaxonomiesResponse +from .types.policytagmanagerserialization import InlineSource +from .types.policytagmanagerserialization import SerializedPolicyTag +from .types.policytagmanagerserialization import SerializedTaxonomy +from .types.schema import ColumnSchema +from .types.schema import Schema +from .types.search import SearchCatalogResult +from .types.search import SearchResultType +from .types.table_spec import BigQueryDateShardedSpec +from .types.table_spec import BigQueryTableSpec +from .types.table_spec import TableSpec +from .types.table_spec import ViewSpec +from .types.table_spec import TableSourceType +from .types.tags import FieldType +from .types.tags import Tag +from .types.tags import TagField +from .types.tags import TagTemplate +from .types.tags import TagTemplateField +from .types.timestamps import SystemTimestamps +from .types.usage import UsageSignal +from .types.usage import UsageStats + +__all__ = ( + 'DataCatalogAsyncClient', + 'PolicyTagManagerAsyncClient', + 'PolicyTagManagerSerializationAsyncClient', +'BigQueryDateShardedSpec', +'BigQueryTableSpec', +'ColumnSchema', +'CreateEntryGroupRequest', +'CreateEntryRequest', +'CreatePolicyTagRequest', +'CreateTagRequest', +'CreateTagTemplateFieldRequest', +'CreateTagTemplateRequest', +'CreateTaxonomyRequest', +'DataCatalogClient', +'DeleteEntryGroupRequest', +'DeleteEntryRequest', +'DeletePolicyTagRequest', +'DeleteTagRequest', +'DeleteTagTemplateFieldRequest', +'DeleteTagTemplateRequest', +'DeleteTaxonomyRequest', +'Entry', +'EntryGroup', +'EntryType', +'ExportTaxonomiesRequest', +'ExportTaxonomiesResponse', +'FieldType', +'GcsFileSpec', +'GcsFilesetSpec', +'GetEntryGroupRequest', +'GetEntryRequest', +'GetPolicyTagRequest', +'GetTagTemplateRequest', +'GetTaxonomyRequest', +'ImportTaxonomiesRequest', +'ImportTaxonomiesResponse', +'InlineSource', +'IntegratedSystem', +'ListEntriesRequest', +'ListEntriesResponse', +'ListEntryGroupsRequest', +'ListEntryGroupsResponse', +'ListPolicyTagsRequest', +'ListPolicyTagsResponse', +'ListTagsRequest', +'ListTagsResponse', +'ListTaxonomiesRequest', +'ListTaxonomiesResponse', +'LookupEntryRequest', +'ManagingSystem', +'PolicyTag', +'PolicyTagManagerClient', +'PolicyTagManagerSerializationClient', +'RenameTagTemplateFieldEnumValueRequest', +'RenameTagTemplateFieldRequest', +'Schema', +'SearchCatalogRequest', +'SearchCatalogResponse', +'SearchCatalogResult', +'SearchResultType', +'SerializedPolicyTag', +'SerializedTaxonomy', +'SystemTimestamps', +'TableSourceType', +'TableSpec', +'Tag', +'TagField', +'TagTemplate', 
+'TagTemplateField', +'Taxonomy', +'UpdateEntryGroupRequest', +'UpdateEntryRequest', +'UpdatePolicyTagRequest', +'UpdateTagRequest', +'UpdateTagTemplateFieldRequest', +'UpdateTagTemplateRequest', +'UpdateTaxonomyRequest', +'UsageSignal', +'UsageStats', +'ViewSpec', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json new file mode 100644 index 000000000000..b40fba91c7c5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json @@ -0,0 +1,481 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.datacatalog_v1beta1", + "protoPackage": "google.cloud.datacatalog.v1beta1", + "schema": "1.0", + "services": { + "DataCatalog": { + "clients": { + "grpc": { + "libraryClient": "DataCatalogClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + "delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataCatalogAsyncClient", + "rpcs": { + "CreateEntry": { + "methods": [ + "create_entry" + ] + }, + "CreateEntryGroup": { + "methods": [ + "create_entry_group" + ] + }, + "CreateTag": { + "methods": [ + "create_tag" + ] + }, + "CreateTagTemplate": { + "methods": [ + "create_tag_template" + ] + }, + "CreateTagTemplateField": { + "methods": [ + "create_tag_template_field" + ] + }, + "DeleteEntry": { + "methods": [ + "delete_entry" + ] + }, + "DeleteEntryGroup": { + "methods": [ + "delete_entry_group" + ] + }, + "DeleteTag": { + "methods": [ + 
"delete_tag" + ] + }, + "DeleteTagTemplate": { + "methods": [ + "delete_tag_template" + ] + }, + "DeleteTagTemplateField": { + "methods": [ + "delete_tag_template_field" + ] + }, + "GetEntry": { + "methods": [ + "get_entry" + ] + }, + "GetEntryGroup": { + "methods": [ + "get_entry_group" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetTagTemplate": { + "methods": [ + "get_tag_template" + ] + }, + "ListEntries": { + "methods": [ + "list_entries" + ] + }, + "ListEntryGroups": { + "methods": [ + "list_entry_groups" + ] + }, + "ListTags": { + "methods": [ + "list_tags" + ] + }, + "LookupEntry": { + "methods": [ + "lookup_entry" + ] + }, + "RenameTagTemplateField": { + "methods": [ + "rename_tag_template_field" + ] + }, + "RenameTagTemplateFieldEnumValue": { + "methods": [ + "rename_tag_template_field_enum_value" + ] + }, + "SearchCatalog": { + "methods": [ + "search_catalog" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateEntry": { + "methods": [ + "update_entry" + ] + }, + "UpdateEntryGroup": { + "methods": [ + "update_entry_group" + ] + }, + "UpdateTag": { + "methods": [ + "update_tag" + ] + }, + "UpdateTagTemplate": { + "methods": [ + "update_tag_template" + ] + }, + "UpdateTagTemplateField": { + "methods": [ + "update_tag_template_field" + ] + } + } + } + } + }, + "PolicyTagManager": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerAsyncClient", + "rpcs": { + "CreatePolicyTag": { + "methods": [ + "create_policy_tag" + ] + }, + "CreateTaxonomy": { + "methods": [ + "create_taxonomy" + ] + }, + "DeletePolicyTag": { + "methods": [ + "delete_policy_tag" + ] + }, + "DeleteTaxonomy": { + "methods": [ + "delete_taxonomy" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "GetPolicyTag": { + "methods": [ + "get_policy_tag" + ] + }, + "GetTaxonomy": { + "methods": [ + "get_taxonomy" + ] + }, + "ListPolicyTags": { + "methods": [ + "list_policy_tags" + ] + }, + "ListTaxonomies": { + "methods": [ + "list_taxonomies" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdatePolicyTag": { + "methods": [ + "update_policy_tag" + ] + }, + "UpdateTaxonomy": { + "methods": [ + "update_taxonomy" + ] + } + } + } + } + }, + "PolicyTagManagerSerialization": { + "clients": { + "grpc": { + "libraryClient": "PolicyTagManagerSerializationClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + 
] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PolicyTagManagerSerializationAsyncClient", + "rpcs": { + "ExportTaxonomies": { + "methods": [ + "export_taxonomies" + ] + }, + "ImportTaxonomies": { + "methods": [ + "import_taxonomies" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py new file mode 100644 index 000000000000..360a0d13ebdd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed new file mode 100644 index 000000000000..bb4088a3c198 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datacatalog package uses inline types. diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py new file mode 100644 index 000000000000..89a37dc92c5a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py new file mode 100644 index 000000000000..e703e914bb2c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DataCatalogClient +from .async_client import DataCatalogAsyncClient + +__all__ = ( + 'DataCatalogClient', + 'DataCatalogAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py new file mode 100644 index 000000000000..49619a4b91e3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py @@ -0,0 +1,3653 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1beta1.types import schema +from google.cloud.datacatalog_v1beta1.types import search +from google.cloud.datacatalog_v1beta1.types import table_spec +from google.cloud.datacatalog_v1beta1.types import tags +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.cloud.datacatalog_v1beta1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport +from .client import DataCatalogClient + + +class DataCatalogAsyncClient: + """Data Catalog API service allows clients to 
discover, + understand, and manage their data. + """ + + _client: DataCatalogClient + + DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT + + entry_path = staticmethod(DataCatalogClient.entry_path) + parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) + entry_group_path = staticmethod(DataCatalogClient.entry_group_path) + parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) + tag_path = staticmethod(DataCatalogClient.tag_path) + parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) + tag_template_path = staticmethod(DataCatalogClient.tag_template_path) + parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) + tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) + parse_tag_template_field_path = staticmethod(DataCatalogClient.parse_tag_template_field_path) + tag_template_field_enum_value_path = staticmethod(DataCatalogClient.tag_template_field_enum_value_path) + parse_tag_template_field_enum_value_path = staticmethod(DataCatalogClient.parse_tag_template_field_enum_value_path) + common_billing_account_path = staticmethod(DataCatalogClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DataCatalogClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DataCatalogClient.common_folder_path) + parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) + common_organization_path = staticmethod(DataCatalogClient.common_organization_path) + parse_common_organization_path = staticmethod(DataCatalogClient.parse_common_organization_path) + common_project_path = staticmethod(DataCatalogClient.common_project_path) + parse_common_project_path = staticmethod(DataCatalogClient.parse_common_project_path) + common_location_path = staticmethod(DataCatalogClient.common_location_path) + parse_common_location_path = staticmethod(DataCatalogClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataCatalogAsyncClient: The constructed client. + """ + return DataCatalogClient.from_service_account_info.__func__(DataCatalogAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataCatalogAsyncClient: The constructed client. + """ + return DataCatalogClient.from_service_account_file.__func__(DataCatalogAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
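As a hedged usage sketch for the ``from_service_account_file`` / ``from_service_account_info`` constructors above (the key-file path is a placeholder, not from the source):

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    # Build the async client from a service-account key file; the path is illustrative.
    client = datacatalog_v1beta1.DataCatalogAsyncClient.from_service_account_file(
        "service-account.json"
    )
    # from_service_account_json is an alias for from_service_account_file.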
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataCatalogClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataCatalogTransport: + """Returns the transport used by the client instance. + + Returns: + DataCatalogTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DataCatalogClient).get_transport_class, type(DataCatalogClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, DataCatalogTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data catalog client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataCatalogTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
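As an illustrative sketch only (the endpoint shown is simply the public default), ``client_options`` can be used to pin the API endpoint explicitly:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import datacatalog_v1beta1

    # Override the endpoint via client_options; any reachable Data Catalog
    # endpoint could be supplied here instead of the public default.
    options = ClientOptions(api_endpoint="datacatalog.googleapis.com")
    client = datacatalog_v1beta1.DataCatalogAsyncClient(client_options=options)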
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DataCatalogClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def search_catalog(self, + request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, + *, + scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCatalogAsyncPager: + r"""Searches Data Catalog for multiple resources like entries, tags + that match a query. + + This is a custom method + (https://cloud.google.com/apis/design/custom_methods) and does + not return the complete resource, only the resource identifier + and high level fields. Clients can subsequently call ``Get`` + methods. + + Note that Data Catalog search queries do not guarantee full + recall. Query results that match your query may not be returned, + even in subsequent result pages. Also note that results returned + (and not returned) can vary across repeated search queries. + + See `Data Catalog Search + Syntax `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_search_catalog(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest, dict]]): + The request object. Request message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + scope (:class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope`): + Required. The scope of this search request. A ``scope`` + that has empty ``include_org_ids``, + ``include_project_ids`` AND false + ``include_gcp_public_datasets`` is considered invalid. + Data Catalog will return an error in such a case. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (:class:`str`): + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query + strings can be simple as "x" or more qualified as: + + - name:x + - column:x + - description:y + + Note: Query tokens need to have a minimum of 3 + characters for substring matching to work correctly. See + `Data Catalog Search + Syntax `__ + for more information. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogAsyncPager: + Response message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.SearchCatalogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_catalog, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchCatalogAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry_group(self, + request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group_id: Optional[str] = None, + entry_group: Optional[datacatalog.EntryGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""A maximum of 10,000 entry groups may be created per organization + across all locations. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_create_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = await client.create_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest, dict]]): + The request object. 
Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. + parent (:class:`str`): + Required. The name of the project this entry group is + in. Example: + + - projects/{project_id}/locations/{location} + + Note that this EntryGroup and its child resources may + not actually be stored in the location in this name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (:class:`str`): + Required. The id of the entry group + to create. The id must begin with a + letter or underscore, contain only + English letters, numbers and + underscores, and be at most 64 + characters. + + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (:class:`google.cloud.datacatalog_v1beta1.types.EntryGroup`): + The entry group to create. Defaults + to an empty entry group. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group_id is not None: + request.entry_group_id = entry_group_id + if entry_group is not None: + request.entry_group = entry_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entry_group(self, + request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[datacatalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Updates an EntryGroup. 
The user should enable the Data Catalog + API in the project identified by the ``entry_group.name`` + parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateEntryGroupRequest( + ) + + # Make the request + response = await client.update_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest, dict]]): + The request object. Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. + entry_group (:class:`google.cloud.datacatalog_v1beta1.types.EntryGroup`): + Required. The updated entry group. + "name" field must be set. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
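# A hedged usage sketch (not part of the generated client) of the update_mask
# behaviour described above: only the fields named in the FieldMask are
# overwritten. The entry-group name and description are placeholders.
from google.protobuf import field_mask_pb2
from google.cloud import datacatalog_v1beta1

async def update_group_description(client: datacatalog_v1beta1.DataCatalogAsyncClient):
    group = datacatalog_v1beta1.EntryGroup(
        name="projects/my-project/locations/us-central1/entryGroups/my_group",
        description="Curated fileset entries",
    )
    # Overwrite only the description; other modifiable fields stay untouched.
    mask = field_mask_pb2.FieldMask(paths=["description"])
    return await client.update_entry_group(entry_group=group, update_mask=mask)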
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_group(self, + request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + read_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_get_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest, dict]]): + The request object. Request message for + [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. + name (:class:`str`): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to return. If not set or + empty, all fields are returned. + + This corresponds to the ``read_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
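# An illustrative sketch of the two equivalent calling styles this check allows
# (the resource name is a placeholder); passing a request object together with
# a flattened field raises ValueError, as noted above.
from google.cloud import datacatalog_v1beta1

async def get_group_both_ways(client: datacatalog_v1beta1.DataCatalogAsyncClient):
    name = "projects/my-project/locations/us-central1/entryGroups/my_group"
    # Style 1: pass a full request object.
    via_request = await client.get_entry_group(
        request=datacatalog_v1beta1.GetEntryGroupRequest(name=name)
    )
    # Style 2: pass the flattened keyword argument instead.
    via_flattened = await client.get_entry_group(name=name)
    return via_request, via_flattened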
+ if name is not None: + request.name = name + if read_mask is not None: + request.read_mask = read_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry_group(self, + request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an EntryGroup. Only entry groups that do not contain + entries can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry_group(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest, dict]]): + The request object. Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. + name (:class:`str`): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteEntryGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entry_group, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_entry_groups(self, + request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsAsyncPager: + r"""Lists entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest, dict]]): + The request object. Request message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + parent (:class:`str`): + Required. The name of the location that contains the + entry groups, which can be provided in URL format. + Example: + + - projects/{project_id}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsAsyncPager: + Response message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListEntryGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
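# A usage sketch for the pager returned by this method (the parent value is a
# placeholder). Iterating the pager with ``async for`` fetches additional
# pages on demand; the pager also exposes a ``pages`` iterator for whole pages.
from google.cloud import datacatalog_v1beta1

async def print_entry_groups(client: datacatalog_v1beta1.DataCatalogAsyncClient):
    pager = await client.list_entry_groups(
        parent="projects/my-project/locations/us-central1"
    )
    async for entry_group in pager:
        # Each item is a datacatalog_v1beta1.EntryGroup.
        print(entry_group.name)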
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_entry_groups, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEntryGroupsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_entry(self, + request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_id: Optional[str] = None, + entry: Optional[datacatalog.Entry] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Creates an entry. Only entries of 'FILESET' type or + user-specified type can be created. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + A maximum of 100,000 entries may be created per entry group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_create_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateEntryRequest, dict]]): + The request object. Request message for + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. + parent (:class:`str`): + Required. The name of the entry group this entry is in. + Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + Note that this Entry and its child resources may not + actually be stored in the location in this name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_id (:class:`str`): + Required. The id of the entry to + create. 
+ + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (:class:`google.cloud.datacatalog_v1beta1.types.Entry`): + Required. The entry to create. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_id is not None: + request.entry_id = entry_id + if entry is not None: + request.entry = entry + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_entry(self, + request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[datacatalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Updates an existing entry. Users should enable the Data Catalog + API in the project identified by the ``entry.name`` parameter + (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest, dict]]): + The request object. Request message for + [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. + entry (:class:`google.cloud.datacatalog_v1beta1.types.Entry`): + Required. The updated entry. The + "name" field must be set. + + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. + + The following fields are modifiable: + + - For entries with type ``DATA_STREAM``: + + - ``schema`` + + - For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + - For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry(self, + request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest, dict]]): + The request object. Request message for + [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. + name (:class:`str`): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_entry(self, + request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_get_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetEntryRequest, dict]]): + The request object. Request message for + [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. + name (:class:`str`): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
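# A sketch of typical error handling around this call (the entry name is a
# placeholder); a nonexistent entry is generally surfaced as
# google.api_core.exceptions.NotFound.
from google.api_core import exceptions as core_exceptions
from google.cloud import datacatalog_v1beta1

async def get_entry_or_none(
    client: datacatalog_v1beta1.DataCatalogAsyncClient, name: str
):
    try:
        return await client.get_entry(name=name)
    except core_exceptions.NotFound:
        return None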
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetEntryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def lookup_entry(self, + request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Get an entry by target resource name. This method + allows clients to use the resource name from the source + Google Cloud Platform service to get the Data Catalog + Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_lookup_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.LookupEntryRequest, dict]]): + The request object. Request message for + [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. 
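# A hedged sketch of the other lookup style: LookupEntryRequest also carries a
# sql_resource field for SQL-style names (the BigQuery identifiers below are
# placeholders); set exactly one of linked_resource or sql_resource.
from google.cloud import datacatalog_v1beta1

async def lookup_bigquery_table(client: datacatalog_v1beta1.DataCatalogAsyncClient):
    request = datacatalog_v1beta1.LookupEntryRequest(
        sql_resource="bigquery.table.my_project.my_dataset.my_table"
    )
    return await client.lookup_entry(request=request)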
+ request = datacatalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.lookup_entry, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_entries(self, + request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntriesAsyncPager: + r"""Lists entries. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_list_entries(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListEntriesRequest, dict]]): + The request object. Request message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + parent (:class:`str`): + Required. The name of the entry group that contains the + entries, which can be provided in URL format. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesAsyncPager: + Response message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_entries,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListEntriesAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_tag_template(self,
+            request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            tag_template_id: Optional[str] = None,
+            tag_template: Optional[tags.TagTemplate] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplate:
+        r"""Creates a tag template. The user should enable the Data Catalog
+        API in the project identified by the ``parent`` parameter (see
+        `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            async def sample_create_tag_template():
+                # Create a client
+                client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+                # Initialize request argument(s)
+                request = datacatalog_v1beta1.CreateTagTemplateRequest(
+                    parent="parent_value",
+                    tag_template_id="tag_template_id_value",
+                )
+
+                # Make the request
+                response = await client.create_tag_template(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest, dict]]):
+                The request object. Request message for
+                [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate].
+            parent (:class:`str`):
+                Required. The name of the project and the template
+                location
+                [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
+
+                Example:
+
+                - projects/{project_id}/locations/us-central1
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template_id (:class:`str`):
+                Required. The id of the tag template
+                to create.
+
+                This corresponds to the ``tag_template_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplate`):
+                Required. The tag template to create.
+                This corresponds to the ``tag_template`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag_template_id is not None: + request.tag_template_id = tag_template_id + if tag_template is not None: + request.tag_template = tag_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tag_template(self, + request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Gets a tag template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_get_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest, dict]]): + The request object. Request message for + [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. + name (:class:`str`): + Required. 
The name of the tag template. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.GetTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag_template(self, + request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, + *, + tag_template: Optional[tags.TagTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Updates a tag template. This method cannot be used to update the + fields of a template. The tag template fields are represented as + separate resources and should be updated using their own + create/update/delete methods. Users should enable the Data + Catalog API in the project identified by the + ``tag_template.name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTagTemplateRequest( + ) + + # Make the request + response = await client.update_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest, dict]]): + The request object. Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. + tag_template (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplate`): + Required. The template to update. The + "name" field must be set. + + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag_template is not None: + request.tag_template = tag_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tag_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag_template.name", request.tag_template.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag_template(self, + request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag template and all tags using the template. Users + should enable the Data Catalog API in the project identified by + the ``name`` parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest, dict]]): + The request object. Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. + name (:class:`str`): + Required. The name of the tag template to delete. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of any possible + tags using this template. ``force = false`` will be + supported in the future. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if name is not None:
+            request.name = name
+        if force is not None:
+            request.force = force
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_tag_template,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def create_tag_template_field(self,
+            request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            tag_template_field_id: Optional[str] = None,
+            tag_template_field: Optional[tags.TagTemplateField] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplateField:
+        r"""Creates a field in a tag template. The user should enable the
+        Data Catalog API in the project identified by the ``parent``
+        parameter (see `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            async def sample_create_tag_template_field():
+                # Create a client
+                client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+                # Initialize request argument(s)
+                tag_template_field = datacatalog_v1beta1.TagTemplateField()
+                tag_template_field.type_.primitive_type = "TIMESTAMP"
+
+                request = datacatalog_v1beta1.CreateTagTemplateFieldRequest(
+                    parent="parent_value",
+                    tag_template_field_id="tag_template_field_id_value",
+                    tag_template_field=tag_template_field,
+                )
+
+                # Make the request
+                response = await client.create_tag_template_field(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest, dict]]):
+                The request object. Request message for
+                [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField].
+            parent (:class:`str`):
+                Required. The name of the project and the template
+                location
+                `region <https://cloud.google.com/data-catalog/docs/concepts/regions>`__.
+
+                Example:
+
+                - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id}
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template_field_id (:class:`str`):
+                Required. The ID of the tag template field to create.
+                Field IDs can contain letters (both uppercase and
+                lowercase), numbers (0-9), underscores (_) and dashes
+                (-). Field IDs must be at least 1 character long and at
+                most 128 characters long. Field IDs must also be unique
+                within their template.
+
+                This corresponds to the ``tag_template_field_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+ tag_template_field (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplateField`): + Required. The tag template field to + create. + + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag_template_field_id is not None: + request.tag_template_field_id = tag_template_field_id + if tag_template_field is not None: + request.tag_template_field = tag_template_field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag_template_field(self, + request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Updates a field in a tag template. This method cannot be used to + update the field type. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.update_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest, dict]]): + The request object. Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. + name (:class:`str`): + Required. The name of the tag template field. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplateField`): + Required. The template to update. + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. The following + fields are modifiable: + + - ``display_name`` + - ``type.enum_type`` + - ``is_required`` + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied with one exception: when updating an enum type, + the provided values are merged with the existing values. + Therefore, enum values can only be added, existing enum + values cannot be deleted or renamed. + + Additionally, updating a template field from optional to + required is *not* allowed. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, tag_template_field, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if name is not None:
+            request.name = name
+        if tag_template_field is not None:
+            request.tag_template_field = tag_template_field
+        if update_mask is not None:
+            request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_tag_template_field,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def rename_tag_template_field(self,
+            request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            new_tag_template_field_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplateField:
+        r"""Renames a field in a tag template. The user should enable the
+        Data Catalog API in the project identified by the ``name``
+        parameter (see `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            async def sample_rename_tag_template_field():
+                # Create a client
+                client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+                # Initialize request argument(s)
+                request = datacatalog_v1beta1.RenameTagTemplateFieldRequest(
+                    name="name_value",
+                    new_tag_template_field_id="new_tag_template_field_id_value",
+                )
+
+                # Make the request
+                response = await client.rename_tag_template_field(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest, dict]]):
+                The request object. Request message for
+                [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField].
+            name (:class:`str`):
+                Required. The name of the tag template field. Example:
+
+                - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            new_tag_template_field_id (:class:`str`):
+                Required. The new ID of this tag template field. For
+                example, ``my_new_field``.
+
+                This corresponds to the ``new_tag_template_field_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.RenameTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_tag_template_field_id is not None: + request.new_tag_template_field_id = new_tag_template_field_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rename_tag_template_field_enum_value(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, + *, + name: Optional[str] = None, + new_enum_value_display_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. Thus, an + enum value cannot be renamed with a name used in any + other enum value within the same enum field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = await client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest, dict]]): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (:class:`str`): + Required. The name of the enum field value. 
Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (:class:`str`): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag_template_field(self, + request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a field in a tag template and all uses of that field. + Users should enable the Data Catalog API in the project + identified by the ``name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template_field(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest, dict]]): + The request object. Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. + name (:class:`str`): + Required. The name of the tag template field to delete. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of this field from + any tags using this field. ``force = false`` will be + supported in the future. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagTemplateFieldRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tag_template_field, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_tag(self, + request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag: Optional[tags.Tag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Creates a tag on an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. 
Note: The + project identified by the ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_create_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = await client.create_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagRequest, dict]]): + The request object. Request message for + [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. + parent (:class:`str`): + Required. The name of the resource to attach this tag + to. Tags can be attached to Entries. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + Note that this Tag and its child resources may not + actually be stored in the location in this name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag (:class:`google.cloud.datacatalog_v1beta1.types.Tag`): + Required. The tag to create. + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Tag: + Tags are used to attach custom metadata to Data Catalog resources. Tags + conform to the specifications within their tag + template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.CreateTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag is not None: + request.tag = tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_tag(self, + request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, + *, + tag: Optional[tags.Tag] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Updates an existing tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = await client.update_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagRequest, dict]]): + The request object. Request message for + [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. + tag (:class:`google.cloud.datacatalog_v1beta1.types.Tag`): + Required. The updated tag. The "name" + field must be set. + + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Note: Currently, this parameter can only take + ``"fields"`` as value. + + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Tag: + Tags are used to attach custom metadata to Data Catalog resources. Tags + conform to the specifications within their tag + template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.UpdateTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag is not None: + request.tag = tag + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag.name", request.tag.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tag(self, + request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_tag(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagRequest, dict]]): + The request object. Request message for + [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. + name (:class:`str`): + Required. The name of the tag to delete. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.DeleteTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_tags(self, + request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTagsAsyncPager: + r"""Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. The + [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the + response are lowercased. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_list_tags(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListTagsRequest, dict]]): + The request object. Request message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. + parent (:class:`str`): + Required. The name of the Data Catalog resource to list + the tags of. The resource could be an + [Entry][google.cloud.datacatalog.v1beta1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. + + Examples: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsAsyncPager: + Response message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = datacatalog.ListTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a resource. Replaces any + existing policy. Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on + entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. 
+ resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. A ``NOT_FOUND`` + error is returned if the resource does not exist. An empty + policy is returned if the resource exists but does not have a + policy set on it. + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on + entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. 
+ Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the caller's permissions on a resource. 
If the resource + does not exist, an empty set of permissions is returned (We + don't return a ``NOT_FOUND`` error). + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + A caller is not required to have Google IAM permission to make + this request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "DataCatalogAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataCatalogAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py new file mode 100644 index 000000000000..b7bd21b66698 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -0,0 +1,3904 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1beta1.types import schema +from google.cloud.datacatalog_v1beta1.types import search +from google.cloud.datacatalog_v1beta1.types import table_spec +from google.cloud.datacatalog_v1beta1.types import tags +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.cloud.datacatalog_v1beta1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import DataCatalogGrpcTransport +from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport + + +class DataCatalogClientMeta(type): + """Metaclass for the DataCatalog client. 
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DataCatalogTransport]]
+    _transport_registry["grpc"] = DataCatalogGrpcTransport
+    _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[DataCatalogTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataCatalogClient(metaclass=DataCatalogClientMeta):
+    """Data Catalog API service allows clients to discover,
+    understand, and manage their data.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataCatalogClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            DataCatalogClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> DataCatalogTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            DataCatalogTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: + """Returns a fully-qualified entry string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) + + @staticmethod + def parse_entry_path(path: str) -> Dict[str,str]: + """Parses a entry path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def entry_group_path(project: str,location: str,entry_group: str,) -> str: + """Returns a fully-qualified entry_group string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + + @staticmethod + def parse_entry_group_path(path: str) -> Dict[str,str]: + """Parses a entry_group path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def tag_path(project: str,location: str,entry_group: str,entry: str,tag: str,) -> str: + """Returns a fully-qualified tag string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) + + @staticmethod + def parse_tag_path(path: str) -> Dict[str,str]: + """Parses a tag path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)/tags/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def tag_template_path(project: str,location: str,tag_template: str,) -> str: + """Returns a fully-qualified tag_template string.""" + return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) + + @staticmethod + def parse_tag_template_path(path: str) -> Dict[str,str]: + """Parses a tag_template path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def tag_template_field_path(project: str,location: str,tag_template: str,field: str,) -> str: + """Returns a fully-qualified tag_template_field string.""" + return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) + + @staticmethod + def parse_tag_template_field_path(path: str) -> Dict[str,str]: + """Parses a tag_template_field path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def tag_template_field_enum_value_path(project: str,location: str,tag_template: str,tag_template_field_id: str,enum_value_display_name: str,) -> str: + """Returns a fully-qualified tag_template_field_enum_value string.""" + return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) + + 
+    @staticmethod
+    def parse_tag_template_field_enum_value_path(path: str) -> Dict[str,str]:
+        """Parses a tag_template_field_enum_value path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<tag_template_field_id>.+?)/enumValues/(?P<enum_value_display_name>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, DataCatalogTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data catalog client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, DataCatalogTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataCatalogTransport): + # transport is a DataCatalogTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def search_catalog(self, + request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, + *, + scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchCatalogPager: + r"""Searches Data Catalog for multiple resources like entries, tags + that match a query. + + This is a custom method + (https://cloud.google.com/apis/design/custom_methods) and does + not return the complete resource, only the resource identifier + and high level fields. Clients can subsequently call ``Get`` + methods. + + Note that Data Catalog search queries do not guarantee full + recall. Query results that match your query may not be returned, + even in subsequent result pages. Also note that results returned + (and not returned) can vary across repeated search queries. + + See `Data Catalog Search + Syntax `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_search_catalog(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest, dict]): + The request object. Request message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + scope (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope): + Required. The scope of this search request. A ``scope`` + that has empty ``include_org_ids``, + ``include_project_ids`` AND false + ``include_gcp_public_datasets`` is considered invalid. + Data Catalog will return an error in such a case. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Optional. The query string in search query syntax. An + empty query string will result in all data assets (in + the specified scope) that the user has access to. Query + strings can be simple as "x" or more qualified as: + + - name:x + - column:x + - description:y + + Note: Query tokens need to have a minimum of 3 + characters for substring matching to work correctly. See + `Data Catalog Search + Syntax `__ + for more information. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogPager: + Response message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.SearchCatalogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.SearchCatalogRequest): + request = datacatalog.SearchCatalogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_catalog] + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchCatalogPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entry_group(self, + request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_group_id: Optional[str] = None, + entry_group: Optional[datacatalog.EntryGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""A maximum of 10,000 entry groups may be created per organization + across all locations. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_create_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = client.create_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest, dict]): + The request object. Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. + parent (str): + Required. The name of the project this entry group is + in. Example: + + - projects/{project_id}/locations/{location} + + Note that this EntryGroup and its child resources may + not actually be stored in the location in this name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group_id (str): + Required. The id of the entry group + to create. The id must begin with a + letter or underscore, contain only + English letters, numbers and + underscores, and be at most 64 + characters. + + This corresponds to the ``entry_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): + The entry group to create. Defaults + to an empty entry group. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_group_id, entry_group]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateEntryGroupRequest): + request = datacatalog.CreateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_group_id is not None: + request.entry_group_id = entry_group_id + if entry_group is not None: + request.entry_group = entry_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entry_group(self, + request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, + *, + entry_group: Optional[datacatalog.EntryGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Updates an EntryGroup. The user should enable the Data Catalog + API in the project identified by the ``entry_group.name`` + parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateEntryGroupRequest( + ) + + # Make the request + response = client.update_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest, dict]): + The request object. Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. 
+ entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): + Required. The updated entry group. + "name" field must be set. + + This corresponds to the ``entry_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to + overwrite on an entry group. + If this parameter is absent or empty, + all modifiable fields are overwritten. + If such fields are non-required and + omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry_group, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateEntryGroupRequest): + request = datacatalog.UpdateEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry_group is not None: + request.entry_group = entry_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry_group.name", request.entry_group.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_group(self, + request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + read_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.EntryGroup: + r"""Gets an EntryGroup. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_get_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest, dict]): + The request object. Request message for + [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. + name (str): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return. If not set or + empty, all fields are returned. + + This corresponds to the ``read_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.EntryGroup: + EntryGroup Metadata. + An EntryGroup resource represents a logical grouping + of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] + resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, read_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetEntryGroupRequest): + request = datacatalog.GetEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if read_mask is not None: + request.read_mask = read_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_entry_group(self, + request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an EntryGroup. Only entry groups that do not contain + entries can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_entry_group(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest, dict]): + The request object. Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. + name (str): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteEntryGroupRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteEntryGroupRequest): + request = datacatalog.DeleteEntryGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_entry_groups(self, + request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntryGroupsPager: + r"""Lists entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest, dict]): + The request object. Request message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + parent (str): + Required. The name of the location that contains the + entry groups, which can be provided in URL format. + Example: + + - projects/{project_id}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsPager: + Response message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ListEntryGroupsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ListEntryGroupsRequest): + request = datacatalog.ListEntryGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] + + # Certain fields should be provided within the metadata header; + # add these here. 
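+        # `to_grpc_metadata` folds the routing parameters below into a single
+        # "x-goog-request-params" metadata entry, roughly
+        # ("x-goog-request-params", "parent=projects%2Fmy-project%2Flocations%2Fus-central1"),
+        # where the project and location shown are illustrative only.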
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEntryGroupsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_entry(self, + request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_id: Optional[str] = None, + entry: Optional[datacatalog.Entry] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Creates an entry. Only entries of 'FILESET' type or + user-specified type can be created. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + A maximum of 100,000 entries may be created per entry group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_create_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.CreateEntryRequest, dict]): + The request object. Request message for + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. + parent (str): + Required. The name of the entry group this entry is in. + Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + Note that this Entry and its child resources may not + actually be stored in the location in this name. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_id (str): + Required. The id of the entry to + create. + + This corresponds to the ``entry_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry (google.cloud.datacatalog_v1beta1.types.Entry): + Required. The entry to create. + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entry_id, entry]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateEntryRequest): + request = datacatalog.CreateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_id is not None: + request.entry_id = entry_id + if entry is not None: + request.entry = entry + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_entry(self, + request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, + *, + entry: Optional[datacatalog.Entry] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Updates an existing entry. Users should enable the Data Catalog + API in the project identified by the ``entry.name`` parameter + (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest, dict]): + The request object. Request message for + [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. + entry (google.cloud.datacatalog_v1beta1.types.Entry): + Required. The updated entry. The + "name" field must be set. + + This corresponds to the ``entry`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable + fields are overwritten. If such fields are non-required + and omitted in the request body, their values are + emptied. + + The following fields are modifiable: + + - For entries with type ``DATA_STREAM``: + + - ``schema`` + + - For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + - For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([entry, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateEntryRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateEntryRequest): + request = datacatalog.UpdateEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if entry is not None: + request.entry = entry + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("entry.name", request.entry.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry(self, + request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + client.delete_entry(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest, dict]): + The request object. Request message for + [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. + name (str): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
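+        # Illustrative call forms (hypothetical resource IDs): either
+        #     client.delete_entry(
+        #         name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry")
+        # or
+        #     client.delete_entry(request=datacatalog_v1beta1.DeleteEntryRequest(name=...))
+        # is accepted; combining `request` with the flattened `name` argument
+        # raises the ValueError below.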
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteEntryRequest): + request = datacatalog.DeleteEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_entry(self, + request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Gets an entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_get_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.GetEntryRequest, dict]): + The request object. Request message for + [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. + name (str): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. 
An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetEntryRequest): + request = datacatalog.GetEntryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def lookup_entry(self, + request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.Entry: + r"""Get an entry by target resource name. This method + allows clients to use the resource name from the source + Google Cloud Platform service to get the Data Catalog + Entry. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_lookup_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.LookupEntryRequest, dict]): + The request object. Request message for + [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Entry: + Entry Metadata. + A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery + dataset or a Pub/Sub topic), or outside of Google + Cloud Platform. 
Clients can use the linked_resource + field in the Entry resource to refer to the original + resource ID of the source system. + + An Entry resource contains resource details, such as + its schema. An Entry can also be used to attach + flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.LookupEntryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.LookupEntryRequest): + request = datacatalog.LookupEntryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.lookup_entry] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_entries(self, + request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEntriesPager: + r"""Lists entries. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_list_entries(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ListEntriesRequest, dict]): + The request object. Request message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + parent (str): + Required. The name of the entry group that contains the + entries, which can be provided in URL format. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesPager: + Response message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
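+        # For illustration only (hypothetical parent path), the flattened form is:
+        #     for entry in client.list_entries(
+        #             parent="projects/my-project/locations/us-central1/entryGroups/my_group"):
+        #         print(entry.name)
+        # Paging fields such as `page_size` and `page_token` are not flattened
+        # here and must be set on a ListEntriesRequest passed via `request`;
+        # the returned pager then resolves subsequent pages automatically.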
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datacatalog.ListEntriesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datacatalog.ListEntriesRequest):
+            request = datacatalog.ListEntriesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_entries]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListEntriesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_tag_template(self,
+            request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            tag_template_id: Optional[str] = None,
+            tag_template: Optional[tags.TagTemplate] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplate:
+        r"""Creates a tag template. The user should enable the Data Catalog
+        API in the project identified by the ``parent`` parameter (see
+        `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            def sample_create_tag_template():
+                # Create a client
+                client = datacatalog_v1beta1.DataCatalogClient()
+
+                # Initialize request argument(s)
+                request = datacatalog_v1beta1.CreateTagTemplateRequest(
+                    parent="parent_value",
+                    tag_template_id="tag_template_id_value",
+                )
+
+                # Make the request
+                response = client.create_tag_template(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest, dict]):
+                The request object. Request message for
+                [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate].
+            parent (str):
+                Required. The name of the project and the template
+                location
+                [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
+
+                Example:
+
+                - projects/{project_id}/locations/us-central1
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template_id (str):
+                Required.
The id of the tag template + to create. + + This corresponds to the ``tag_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): + Required. The tag template to create. + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag_template_id, tag_template]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateTagTemplateRequest): + request = datacatalog.CreateTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag_template_id is not None: + request.tag_template_id = tag_template_id + if tag_template is not None: + request.tag_template = tag_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tag_template(self, + request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Gets a tag template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_get_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest, dict]): + The request object. Request message for + [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. + name (str): + Required. The name of the tag template. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.GetTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.GetTagTemplateRequest): + request = datacatalog.GetTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_tag_template(self, + request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, + *, + tag_template: Optional[tags.TagTemplate] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplate: + r"""Updates a tag template. This method cannot be used to update the + fields of a template. The tag template fields are represented as + separate resources and should be updated using their own + create/update/delete methods. Users should enable the Data + Catalog API in the project identified by the + ``tag_template.name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTagTemplateRequest( + ) + + # Make the request + response = client.update_tag_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest, dict]): + The request object. Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. + tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): + Required. The template to update. The + "name" field must be set. + + This corresponds to the ``tag_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag + template. Currently, only ``display_name`` can be + overwritten. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplate: + A tag template defines a tag, which can have one or more typed fields. + The template is used to create and attach the tag to + Google Cloud resources. [Tag template + roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) + provide permissions to create, edit, and use the + template. See, for example, the [TagTemplate + User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) + role, which includes permission to use the tag + template to tag resources. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateTagTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateTagTemplateRequest): + request = datacatalog.UpdateTagTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag_template is not None: + request.tag_template = tag_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tag_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag_template.name", request.tag_template.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag_template(self, + request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag template and all tags using the template. Users + should enable the Data Catalog API in the project identified by + the ``name`` parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest, dict]): + The request object. Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. + name (str): + Required. The name of the tag template to delete. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + Required. 
+                Currently, this field must always be set to
+                ``true``. This confirms the deletion of any possible
+                tags using this template. ``force = false`` will be
+                supported in the future.
+
+                This corresponds to the ``force`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, force])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datacatalog.DeleteTagTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datacatalog.DeleteTagTemplateRequest):
+            request = datacatalog.DeleteTagTemplateRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if force is not None:
+                request.force = force
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_tag_template]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def create_tag_template_field(self,
+            request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            tag_template_field_id: Optional[str] = None,
+            tag_template_field: Optional[tags.TagTemplateField] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplateField:
+        r"""Creates a field in a tag template. The user should enable the
+        Data Catalog API in the project identified by the ``parent``
+        parameter (see `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            def sample_create_tag_template_field():
+                # Create a client
+                client = datacatalog_v1beta1.DataCatalogClient()
+
+                # Initialize request argument(s)
+                tag_template_field = datacatalog_v1beta1.TagTemplateField()
+                tag_template_field.type_.primitive_type = "TIMESTAMP"
+
+                request = datacatalog_v1beta1.CreateTagTemplateFieldRequest(
+                    parent="parent_value",
+                    tag_template_field_id="tag_template_field_id_value",
+                    tag_template_field=tag_template_field,
+                )
+
+                # Make the request
+                response = client.create_tag_template_field(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest, dict]):
+                The request object. Request message for
+                [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField].
+            parent (str):
+                Required. The name of the project and the template
+                location
+                `region <https://cloud.google.com/data-catalog/docs/concepts/regions>`__.
+
+                Example:
+
+                - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id}
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template_field_id (str):
+                Required. The ID of the tag template field to create.
+                Field IDs can contain letters (both uppercase and
+                lowercase), numbers (0-9), underscores (_) and dashes
+                (-). Field IDs must be at least 1 character long and at
+                most 128 characters long. Field IDs must also be unique
+                within their template.
+
+                This corresponds to the ``tag_template_field_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField):
+                Required. The tag template field to
+                create.
+
+                This corresponds to the ``tag_template_field`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.datacatalog_v1beta1.types.TagTemplateField:
+                The template for an individual field
+                within a tag template.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, tag_template_field_id, tag_template_field])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datacatalog.CreateTagTemplateFieldRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest):
+            request = datacatalog.CreateTagTemplateFieldRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
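+            # Note: assigning these proto-plus messages copies their values
+            # into `request`; the caller's original `tag_template_field`
+            # object is left unmodified.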
+ if parent is not None: + request.parent = parent + if tag_template_field_id is not None: + request.tag_template_field_id = tag_template_field_id + if tag_template_field is not None: + request.tag_template_field = tag_template_field + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_tag_template_field(self, + request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + tag_template_field: Optional[tags.TagTemplateField] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Updates a field in a tag template. This method cannot be used to + update the field type. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.update_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest, dict]): + The request object. Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. + name (str): + Required. The name of the tag template field. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): + Required. The template to update. + This corresponds to the ``tag_template_field`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Names of fields whose values to overwrite on + an individual field of a tag template. 
The following
+                fields are modifiable:
+
+                - ``display_name``
+                - ``type.enum_type``
+                - ``is_required``
+
+                If this parameter is absent or empty, all modifiable
+                fields are overwritten. If such fields are non-required
+                and omitted in the request body, their values are
+                emptied with one exception: when updating an enum type,
+                the provided values are merged with the existing values.
+                Therefore, enum values can only be added, existing enum
+                values cannot be deleted or renamed.
+
+                Additionally, updating a template field from optional to
+                required is *not* allowed.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.datacatalog_v1beta1.types.TagTemplateField:
+                The template for an individual field
+                within a tag template.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name, tag_template_field, update_mask])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a datacatalog.UpdateTagTemplateFieldRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest):
+            request = datacatalog.UpdateTagTemplateFieldRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+            if tag_template_field is not None:
+                request.tag_template_field = tag_template_field
+            if update_mask is not None:
+                request.update_mask = update_mask
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_tag_template_field]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def rename_tag_template_field(self,
+            request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None,
+            *,
+            name: Optional[str] = None,
+            new_tag_template_field_id: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> tags.TagTemplateField:
+        r"""Renames a field in a tag template. The user should enable the
+        Data Catalog API in the project identified by the ``name``
+        parameter (see `Data Catalog Resource
+        Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
+        for more information).
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest, dict]): + The request object. Request message for + [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. + name (str): + Required. The name of the tag template. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_tag_template_field_id (str): + Required. The new ID of this tag template field. For + example, ``my_new_field``. + + This corresponds to the ``new_tag_template_field_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_tag_template_field_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): + request = datacatalog.RenameTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_tag_template_field_id is not None: + request.new_tag_template_field_id = new_tag_template_field_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def rename_tag_template_field_enum_value(self, + request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, + *, + name: Optional[str] = None, + new_enum_value_display_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.TagTemplateField: + r"""Renames an enum value in a tag template. The enum + values have to be unique within one enum field. Thus, an + enum value cannot be renamed with a name used in any + other enum value within the same enum field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest, dict]): + The request object. Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + name (str): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + + This corresponds to the ``new_enum_value_display_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.TagTemplateField: + The template for an individual field + within a tag template. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, new_enum_value_display_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): + request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if new_enum_value_display_name is not None: + request.new_enum_value_display_name = new_enum_value_display_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field_enum_value] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag_template_field(self, + request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a field in a tag template and all uses of that field. + Users should enable the Data Catalog API in the project + identified by the ``name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template_field(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest, dict]): + The request object. Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. + name (str): + Required. The name of the tag template field to delete. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of this field from + any tags using this field. ``force = false`` will be + supported in the future. + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, force]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteTagTemplateFieldRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): + request = datacatalog.DeleteTagTemplateFieldRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tag_template_field] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_tag(self, + request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + tag: Optional[tags.Tag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Creates a tag on an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The + project identified by the ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_create_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = client.create_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagRequest, dict]): + The request object. Request message for + [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. + parent (str): + Required. The name of the resource to attach this tag + to. Tags can be attached to Entries. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + Note that this Tag and its child resources may not + actually be stored in the location in this name. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tag (google.cloud.datacatalog_v1beta1.types.Tag): + Required. The tag to create. + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Tag: + Tags are used to attach custom metadata to Data Catalog resources. Tags + conform to the specifications within their tag + template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, tag]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.CreateTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.CreateTagRequest): + request = datacatalog.CreateTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if tag is not None: + request.tag = tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_tag(self, + request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, + *, + tag: Optional[tags.Tag] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> tags.Tag: + r"""Updates an existing tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = client.update_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagRequest, dict]): + The request object. Request message for + [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. + tag (google.cloud.datacatalog_v1beta1.types.Tag): + Required. The updated tag. The "name" + field must be set. + + This corresponds to the ``tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Note: Currently, this parameter can only take + ``"fields"`` as value. + + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the + name ``fields``. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their + values are emptied. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Tag: + Tags are used to attach custom metadata to Data Catalog resources. Tags + conform to the specifications within their tag + template. + + See [Data Catalog + IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) + for information on the permissions needed to create + or view tags. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tag, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.UpdateTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.UpdateTagRequest): + request = datacatalog.UpdateTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tag is not None: + request.tag = tag + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tag] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("tag.name", request.tag.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_tag(self, + request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + client.delete_tag(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagRequest, dict]): + The request object. Request message for + [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. + name (str): + Required. The name of the tag to delete. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.DeleteTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.DeleteTagRequest): + request = datacatalog.DeleteTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_tags(self, + request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTagsPager: + r"""Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. The + [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the + response are lowercased. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_list_tags(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ListTagsRequest, dict]): + The request object. Request message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. + parent (str): + Required. The name of the Data Catalog resource to list + the tags of. The resource could be an + [Entry][google.cloud.datacatalog.v1beta1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. + + Examples: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsPager: + Response message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a datacatalog.ListTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datacatalog.ListTagsRequest): + request = datacatalog.ListTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the access control policy for a resource. Replaces any + existing policy. Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on + entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. 
A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. A ``NOT_FOUND`` + error is returned if the resource does not exist. An empty + policy is returned if the resource exists but does not have a + policy set on it. + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on + entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (We + don't return a ``NOT_FOUND`` error). + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + A caller is not required to have Google IAM permission to make + this request. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataCatalogClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + + + + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "DataCatalogClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py new file mode 100644 index 000000000000..7aa71d9465ff --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py @@ -0,0 +1,504 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import search +from google.cloud.datacatalog_v1beta1.types import tags + + +class SearchCatalogPager: + """A pager for iterating through ``search_catalog`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` object, and + provides an ``__iter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchCatalog`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.SearchCatalogResponse], + request: datacatalog.SearchCatalogRequest, + response: datacatalog.SearchCatalogResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.SearchCatalogRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.SearchCatalogResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[search.SearchCatalogResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchCatalogAsyncPager: + """A pager for iterating through ``search_catalog`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchCatalog`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.SearchCatalogResponse]], + request: datacatalog.SearchCatalogRequest, + response: datacatalog.SearchCatalogResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.SearchCatalogRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.SearchCatalogResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[search.SearchCatalogResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListEntryGroupsResponse], + request: datacatalog.ListEntryGroupsRequest, + response: datacatalog.ListEntryGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntryGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datacatalog.EntryGroup]: + for page in self.pages: + yield from page.entry_groups + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntryGroupsAsyncPager: + """A pager for iterating through ``list_entry_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entry_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntryGroups`` requests and continue to iterate + through the ``entry_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListEntryGroupsResponse]], + request: datacatalog.ListEntryGroupsRequest, + response: datacatalog.ListEntryGroupsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.ListEntryGroupsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListEntryGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datacatalog.EntryGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.entry_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListEntriesResponse], + request: datacatalog.ListEntriesRequest, + response: datacatalog.ListEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[datacatalog.Entry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListEntriesAsyncPager: + """A pager for iterating through ``list_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListEntriesResponse]], + request: datacatalog.ListEntriesRequest, + response: datacatalog.ListEntriesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListEntriesRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datacatalog.ListEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[datacatalog.Entry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTagsPager: + """A pager for iterating through ``list_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTags`` requests and continue to iterate + through the ``tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., datacatalog.ListTagsResponse], + request: datacatalog.ListTagsRequest, + response: datacatalog.ListTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.ListTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datacatalog.ListTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[tags.Tag]: + for page in self.pages: + yield from page.tags + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTagsAsyncPager: + """A pager for iterating through ``list_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTags`` requests and continue to iterate + through the ``tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[datacatalog.ListTagsResponse]], + request: datacatalog.ListTagsRequest, + response: datacatalog.ListTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datacatalog.ListTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[datacatalog.ListTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[tags.Tag]: + async def async_generator(): + async for page in self.pages: + for response in page.tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py new file mode 100644 index 000000000000..8b4fbbf168be --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataCatalogTransport +from .grpc import DataCatalogGrpcTransport +from .grpc_asyncio import DataCatalogGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] +_transport_registry['grpc'] = DataCatalogGrpcTransport +_transport_registry['grpc_asyncio'] = DataCatalogGrpcAsyncIOTransport + +__all__ = ( + 'DataCatalogTransport', + 'DataCatalogGrpcTransport', + 'DataCatalogGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py new file mode 100644 index 000000000000..78f23cdcf3c7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py @@ -0,0 +1,531 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import tags +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class DataCatalogTransport(abc.ABC): + """Abstract transport class for DataCatalog.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
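+            # ``with_gdch_audience`` is only present on GDCH credentials; when no
+            # ``api_audience`` is supplied, the transport host is used as the audience.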
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.search_catalog: gapic_v1.method.wrap_method( + self.search_catalog, + default_timeout=None, + client_info=client_info, + ), + self.create_entry_group: gapic_v1.method.wrap_method( + self.create_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.update_entry_group: gapic_v1.method.wrap_method( + self.update_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_group: gapic_v1.method.wrap_method( + self.get_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_group: gapic_v1.method.wrap_method( + self.delete_entry_group, + default_timeout=None, + client_info=client_info, + ), + self.list_entry_groups: gapic_v1.method.wrap_method( + self.list_entry_groups, + default_timeout=None, + client_info=client_info, + ), + self.create_entry: gapic_v1.method.wrap_method( + self.create_entry, + default_timeout=None, + client_info=client_info, + ), + self.update_entry: gapic_v1.method.wrap_method( + self.update_entry, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry: gapic_v1.method.wrap_method( + self.delete_entry, + default_timeout=None, + client_info=client_info, + ), + self.get_entry: gapic_v1.method.wrap_method( + self.get_entry, + default_timeout=None, + client_info=client_info, + ), + self.lookup_entry: gapic_v1.method.wrap_method( + self.lookup_entry, + default_timeout=None, + client_info=client_info, + ), + self.list_entries: gapic_v1.method.wrap_method( + self.list_entries, + default_timeout=None, + client_info=client_info, + ), + self.create_tag_template: gapic_v1.method.wrap_method( + self.create_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.get_tag_template: gapic_v1.method.wrap_method( + self.get_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.update_tag_template: gapic_v1.method.wrap_method( + self.update_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.delete_tag_template: gapic_v1.method.wrap_method( + self.delete_tag_template, + default_timeout=None, + client_info=client_info, + ), + self.create_tag_template_field: gapic_v1.method.wrap_method( + self.create_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.update_tag_template_field: gapic_v1.method.wrap_method( + self.update_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.rename_tag_template_field: gapic_v1.method.wrap_method( + self.rename_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( + self.rename_tag_template_field_enum_value, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_tag_template_field: gapic_v1.method.wrap_method( + self.delete_tag_template_field, + default_timeout=None, + client_info=client_info, + ), + self.create_tag: gapic_v1.method.wrap_method( + self.create_tag, + default_timeout=None, + client_info=client_info, + ), + self.update_tag: gapic_v1.method.wrap_method( + self.update_tag, + default_timeout=None, + client_info=client_info, + ), + self.delete_tag: gapic_v1.method.wrap_method( + self.delete_tag, + default_timeout=None, + client_info=client_info, + ), + self.list_tags: gapic_v1.method.wrap_method( + self.list_tags, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + Union[ + datacatalog.SearchCatalogResponse, + Awaitable[datacatalog.SearchCatalogResponse] + ]]: + raise NotImplementedError() + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + Union[ + datacatalog.EntryGroup, + Awaitable[datacatalog.EntryGroup] + ]]: + raise NotImplementedError() + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + Union[ + datacatalog.ListEntryGroupsResponse, + Awaitable[datacatalog.ListEntryGroupsResponse] + ]]: + raise NotImplementedError() + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + Union[ + datacatalog.Entry, + Awaitable[datacatalog.Entry] + ]]: + raise NotImplementedError() + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + 
Union[ + datacatalog.ListEntriesResponse, + Awaitable[datacatalog.ListEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise NotImplementedError() + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise NotImplementedError() + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + Union[ + tags.TagTemplate, + Awaitable[tags.TagTemplate] + ]]: + raise NotImplementedError() + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Union[ + tags.TagTemplateField, + Awaitable[tags.TagTemplateField] + ]]: + raise NotImplementedError() + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + Union[ + tags.Tag, + Awaitable[tags.Tag] + ]]: + raise NotImplementedError() + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + Union[ + tags.Tag, + Awaitable[tags.Tag] + ]]: + raise NotImplementedError() + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + Union[ + datacatalog.ListTagsResponse, + Awaitable[datacatalog.ListTagsResponse] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'DataCatalogTransport', +) diff --git 
a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py new file mode 100644 index 000000000000..a93af57fea57 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py @@ -0,0 +1,1122 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import tags +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO + + +class DataCatalogGrpcTransport(DataCatalogTransport): + """gRPC backend transport for DataCatalog. + + Data Catalog API service allows clients to discover, + understand, and manage their data. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
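+                # ``client_cert_source`` is deprecated and is only honored together
+                # with the (also deprecated) ``api_mtls_endpoint`` argument.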
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + datacatalog.SearchCatalogResponse]: + r"""Return a callable for the search catalog method over gRPC. + + Searches Data Catalog for multiple resources like entries, tags + that match a query. 
+ + This is a custom method + (https://cloud.google.com/apis/design/custom_methods) and does + not return the complete resource, only the resource identifier + and high level fields. Clients can subsequently call ``Get`` + methods. + + Note that Data Catalog search queries do not guarantee full + recall. Query results that match your query may not be returned, + even in subsequent result pages. Also note that results returned + (and not returned) can vary across repeated search queries. + + See `Data Catalog Search + Syntax `__ + for more information. + + Returns: + Callable[[~.SearchCatalogRequest], + ~.SearchCatalogResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_catalog' not in self._stubs: + self._stubs['search_catalog'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/SearchCatalog', + request_serializer=datacatalog.SearchCatalogRequest.serialize, + response_deserializer=datacatalog.SearchCatalogResponse.deserialize, + ) + return self._stubs['search_catalog'] + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the create entry group method over gRPC. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.CreateEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup', + request_serializer=datacatalog.CreateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. The user should enable the Data Catalog + API in the project identified by the ``entry_group.name`` + parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
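+        # Stubs are created lazily on first property access and cached in
+        # ``self._stubs``, so repeated calls reuse the same channel callable.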
+ if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntryGroup', + request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['update_entry_group'] + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + datacatalog.EntryGroup]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. + + Returns: + Callable[[~.GetEntryGroupRequest], + ~.EntryGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup', + request_serializer=datacatalog.GetEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. Only entry groups that do not contain + entries can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteEntryGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup', + request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + datacatalog.ListEntryGroupsResponse]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists entry groups. + + Returns: + Callable[[~.ListEntryGroupsRequest], + ~.ListEntryGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
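+        # The request and response types are proto-plus messages, so their
+        # ``serialize``/``deserialize`` helpers act as the gRPC (de)serializers.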
+ if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntryGroups', + request_serializer=datacatalog.ListEntryGroupsRequest.serialize, + response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the create entry method over gRPC. + + Creates an entry. Only entries of 'FILESET' type or + user-specified type can be created. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + A maximum of 100,000 entries may be created per entry group. + + Returns: + Callable[[~.CreateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry', + request_serializer=datacatalog.CreateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the update entry method over gRPC. + + Updates an existing entry. Users should enable the Data Catalog + API in the project identified by the ``entry.name`` parameter + (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry', + request_serializer=datacatalog.UpdateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteEntryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry', + request_serializer=datacatalog.DeleteEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry'] + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the get entry method over gRPC. + + Gets an entry. + + Returns: + Callable[[~.GetEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry', + request_serializer=datacatalog.GetEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + datacatalog.Entry]: + r"""Return a callable for the lookup entry method over gRPC. + + Get an entry by target resource name. This method + allows clients to use the resource name from the source + Google Cloud Platform service to get the Data Catalog + Entry. + + Returns: + Callable[[~.LookupEntryRequest], + ~.Entry]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry', + request_serializer=datacatalog.LookupEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + datacatalog.ListEntriesResponse]: + r"""Return a callable for the list entries method over gRPC. + + Lists entries. + + Returns: + Callable[[~.ListEntriesRequest], + ~.ListEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntries', + request_serializer=datacatalog.ListEntriesRequest.serialize, + response_deserializer=datacatalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the create tag template method over gRPC. + + Creates a tag template. The user should enable the Data Catalog + API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). 
+ + Returns: + Callable[[~.CreateTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template' not in self._stubs: + self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplate', + request_serializer=datacatalog.CreateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['create_tag_template'] + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the get tag template method over gRPC. + + Gets a tag template. + + Returns: + Callable[[~.GetTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_tag_template' not in self._stubs: + self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetTagTemplate', + request_serializer=datacatalog.GetTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['get_tag_template'] + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + tags.TagTemplate]: + r"""Return a callable for the update tag template method over gRPC. + + Updates a tag template. This method cannot be used to update the + fields of a template. The tag template fields are represented as + separate resources and should be updated using their own + create/update/delete methods. Users should enable the Data + Catalog API in the project identified by the + ``tag_template.name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateTagTemplateRequest], + ~.TagTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template' not in self._stubs: + self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplate', + request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['update_tag_template'] + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag template method over gRPC. + + Deletes a tag template and all tags using the template. Users + should enable the Data Catalog API in the project identified by + the ``name`` parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). 
+ + Returns: + Callable[[~.DeleteTagTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag_template' not in self._stubs: + self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate', + request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template'] + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the create tag template field method over gRPC. + + Creates a field in a tag template. The user should enable the + Data Catalog API in the project identified by the ``parent`` + parameter (see `Data Catalog Resource + Project `__ + for more information). + + Returns: + Callable[[~.CreateTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template_field' not in self._stubs: + self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplateField', + request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['create_tag_template_field'] + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the update tag template field method over gRPC. + + Updates a field in a tag template. This method cannot be used to + update the field type. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template_field' not in self._stubs: + self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplateField', + request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['update_tag_template_field'] + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + tags.TagTemplateField]: + r"""Return a callable for the rename tag template field method over gRPC. + + Renames a field in a tag template. 
The user should enable the + Data Catalog API in the project identified by the ``name`` + parameter (see `Data Catalog Resource + Project `__ + for more information). + + Returns: + Callable[[~.RenameTagTemplateFieldRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field' not in self._stubs: + self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateField', + request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field'] + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + tags.TagTemplateField]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. Thus, an + enum value cannot be renamed with a name used in any + other enum value within the same enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + ~.TagTemplateField]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field_enum_value' not in self._stubs: + self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateFieldEnumValue', + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field_enum_value'] + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag template field method over gRPC. + + Deletes a field in a tag template and all uses of that field. + Users should enable the Data Catalog API in the project + identified by the ``name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteTagTemplateFieldRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
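+        # This RPC returns ``google.protobuf.Empty``, so the plain protobuf
+        # ``FromString`` deserializer is used instead of a proto-plus helper.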
+ if 'delete_tag_template_field' not in self._stubs: + self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField', + request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template_field'] + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + tags.Tag]: + r"""Return a callable for the create tag method over gRPC. + + Creates a tag on an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The + project identified by the ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. + + Returns: + Callable[[~.CreateTagRequest], + ~.Tag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag' not in self._stubs: + self._stubs['create_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTag', + request_serializer=datacatalog.CreateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['create_tag'] + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + tags.Tag]: + r"""Return a callable for the update tag method over gRPC. + + Updates an existing tag. + + Returns: + Callable[[~.UpdateTagRequest], + ~.Tag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag' not in self._stubs: + self._stubs['update_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTag', + request_serializer=datacatalog.UpdateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['update_tag'] + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete tag method over gRPC. + + Deletes a tag. + + Returns: + Callable[[~.DeleteTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag' not in self._stubs: + self._stubs['delete_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag', + request_serializer=datacatalog.DeleteTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag'] + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + datacatalog.ListTagsResponse]: + r"""Return a callable for the list tags method over gRPC. + + Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. The + [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the + response are lowercased. 
+ + Returns: + Callable[[~.ListTagsRequest], + ~.ListTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_tags' not in self._stubs: + self._stubs['list_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListTags', + request_serializer=datacatalog.ListTagsRequest.serialize, + response_deserializer=datacatalog.ListTagsResponse.deserialize, + ) + return self._stubs['list_tags'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a resource. Replaces any + existing policy. Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on + entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. A ``NOT_FOUND`` + error is returned if the resource does not exist. An empty + policy is returned if the resource exists but does not have a + policy set on it. + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on + entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
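+        # The IAM RPCs reuse the shared ``google.iam.v1`` protobuf messages, so the
+        # standard ``SerializeToString``/``FromString`` methods are used as codecs.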
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (We + don't return a ``NOT_FOUND`` error). + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + A caller is not required to have Google IAM permission to make + this request. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'DataCatalogGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py new file mode 100644 index 000000000000..076ac6bd0899 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py @@ -0,0 +1,1121 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
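The transport properties above, and those in the AsyncIO transport added below, all follow the same lazy pattern: the unary-unary callable is created on first access and cached in ``self._stubs`` so later accesses reuse it. A minimal, dependency-free sketch of that caching pattern (the class and method names here are illustrative, not part of the generated client):

.. code-block:: python

    from typing import Callable, Dict

    class LazyStubCache:
        """Creates a callable on first property access and reuses it afterwards."""

        def __init__(self) -> None:
            self._stubs: Dict[str, Callable[[str], str]] = {}

        @property
        def echo(self) -> Callable[[str], str]:
            # Same shape as the generated properties: create once, cache, return.
            if 'echo' not in self._stubs:
                self._stubs['echo'] = lambda request: 'echo: ' + request
            return self._stubs['echo']

    cache = LazyStubCache()
    assert cache.echo is cache.echo  # every access returns the same cached callable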
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import tags +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO +from .grpc import DataCatalogGrpcTransport + + +class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport): + """gRPC AsyncIO backend transport for DataCatalog. + + Data Catalog API service allows clients to discover, + understand, and manage their data. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def search_catalog(self) -> Callable[ + [datacatalog.SearchCatalogRequest], + Awaitable[datacatalog.SearchCatalogResponse]]: + r"""Return a callable for the search catalog method over gRPC. + + Searches Data Catalog for multiple resources like entries, tags + that match a query. + + This is a custom method + (https://cloud.google.com/apis/design/custom_methods) and does + not return the complete resource, only the resource identifier + and high level fields. Clients can subsequently call ``Get`` + methods. + + Note that Data Catalog search queries do not guarantee full + recall. Query results that match your query may not be returned, + even in subsequent result pages. Also note that results returned + (and not returned) can vary across repeated search queries. + + See `Data Catalog Search + Syntax `__ + for more information. + + Returns: + Callable[[~.SearchCatalogRequest], + Awaitable[~.SearchCatalogResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_catalog' not in self._stubs: + self._stubs['search_catalog'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/SearchCatalog', + request_serializer=datacatalog.SearchCatalogRequest.serialize, + response_deserializer=datacatalog.SearchCatalogResponse.deserialize, + ) + return self._stubs['search_catalog'] + + @property + def create_entry_group(self) -> Callable[ + [datacatalog.CreateEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the create entry group method over gRPC. + + A maximum of 10,000 entry groups may be created per organization + across all locations. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.CreateEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_entry_group' not in self._stubs: + self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup', + request_serializer=datacatalog.CreateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['create_entry_group'] + + @property + def update_entry_group(self) -> Callable[ + [datacatalog.UpdateEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the update entry group method over gRPC. + + Updates an EntryGroup. The user should enable the Data Catalog + API in the project identified by the ``entry_group.name`` + parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry_group' not in self._stubs: + self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntryGroup', + request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['update_entry_group'] + + @property + def get_entry_group(self) -> Callable[ + [datacatalog.GetEntryGroupRequest], + Awaitable[datacatalog.EntryGroup]]: + r"""Return a callable for the get entry group method over gRPC. + + Gets an EntryGroup. + + Returns: + Callable[[~.GetEntryGroupRequest], + Awaitable[~.EntryGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_entry_group' not in self._stubs: + self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup', + request_serializer=datacatalog.GetEntryGroupRequest.serialize, + response_deserializer=datacatalog.EntryGroup.deserialize, + ) + return self._stubs['get_entry_group'] + + @property + def delete_entry_group(self) -> Callable[ + [datacatalog.DeleteEntryGroupRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entry group method over gRPC. + + Deletes an EntryGroup. Only entry groups that do not contain + entries can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteEntryGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry_group' not in self._stubs: + self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup', + request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry_group'] + + @property + def list_entry_groups(self) -> Callable[ + [datacatalog.ListEntryGroupsRequest], + Awaitable[datacatalog.ListEntryGroupsResponse]]: + r"""Return a callable for the list entry groups method over gRPC. + + Lists entry groups. + + Returns: + Callable[[~.ListEntryGroupsRequest], + Awaitable[~.ListEntryGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entry_groups' not in self._stubs: + self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntryGroups', + request_serializer=datacatalog.ListEntryGroupsRequest.serialize, + response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, + ) + return self._stubs['list_entry_groups'] + + @property + def create_entry(self) -> Callable[ + [datacatalog.CreateEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the create entry method over gRPC. + + Creates an entry. Only entries of 'FILESET' type or + user-specified type can be created. + + Users should enable the Data Catalog API in the project + identified by the ``parent`` parameter (see [Data Catalog + Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + A maximum of 100,000 entries may be created per entry group. + + Returns: + Callable[[~.CreateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_entry' not in self._stubs: + self._stubs['create_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry', + request_serializer=datacatalog.CreateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['create_entry'] + + @property + def update_entry(self) -> Callable[ + [datacatalog.UpdateEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the update entry method over gRPC. + + Updates an existing entry. Users should enable the Data Catalog + API in the project identified by the ``entry.name`` parameter + (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_entry' not in self._stubs: + self._stubs['update_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry', + request_serializer=datacatalog.UpdateEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['update_entry'] + + @property + def delete_entry(self) -> Callable[ + [datacatalog.DeleteEntryRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entry method over gRPC. + + Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteEntryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_entry' not in self._stubs: + self._stubs['delete_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry', + request_serializer=datacatalog.DeleteEntryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_entry'] + + @property + def get_entry(self) -> Callable[ + [datacatalog.GetEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the get entry method over gRPC. + + Gets an entry. + + Returns: + Callable[[~.GetEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_entry' not in self._stubs: + self._stubs['get_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry', + request_serializer=datacatalog.GetEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['get_entry'] + + @property + def lookup_entry(self) -> Callable[ + [datacatalog.LookupEntryRequest], + Awaitable[datacatalog.Entry]]: + r"""Return a callable for the lookup entry method over gRPC. + + Get an entry by target resource name. This method + allows clients to use the resource name from the source + Google Cloud Platform service to get the Data Catalog + Entry. + + Returns: + Callable[[~.LookupEntryRequest], + Awaitable[~.Entry]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'lookup_entry' not in self._stubs: + self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry', + request_serializer=datacatalog.LookupEntryRequest.serialize, + response_deserializer=datacatalog.Entry.deserialize, + ) + return self._stubs['lookup_entry'] + + @property + def list_entries(self) -> Callable[ + [datacatalog.ListEntriesRequest], + Awaitable[datacatalog.ListEntriesResponse]]: + r"""Return a callable for the list entries method over gRPC. + + Lists entries. + + Returns: + Callable[[~.ListEntriesRequest], + Awaitable[~.ListEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_entries' not in self._stubs: + self._stubs['list_entries'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntries', + request_serializer=datacatalog.ListEntriesRequest.serialize, + response_deserializer=datacatalog.ListEntriesResponse.deserialize, + ) + return self._stubs['list_entries'] + + @property + def create_tag_template(self) -> Callable[ + [datacatalog.CreateTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the create tag template method over gRPC. + + Creates a tag template. The user should enable the Data Catalog + API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). + + Returns: + Callable[[~.CreateTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_tag_template' not in self._stubs: + self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplate', + request_serializer=datacatalog.CreateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['create_tag_template'] + + @property + def get_tag_template(self) -> Callable[ + [datacatalog.GetTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the get tag template method over gRPC. + + Gets a tag template. + + Returns: + Callable[[~.GetTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_tag_template' not in self._stubs: + self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetTagTemplate', + request_serializer=datacatalog.GetTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['get_tag_template'] + + @property + def update_tag_template(self) -> Callable[ + [datacatalog.UpdateTagTemplateRequest], + Awaitable[tags.TagTemplate]]: + r"""Return a callable for the update tag template method over gRPC. + + Updates a tag template. This method cannot be used to update the + fields of a template. The tag template fields are represented as + separate resources and should be updated using their own + create/update/delete methods. Users should enable the Data + Catalog API in the project identified by the + ``tag_template.name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateTagTemplateRequest], + Awaitable[~.TagTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template' not in self._stubs: + self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplate', + request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, + response_deserializer=tags.TagTemplate.deserialize, + ) + return self._stubs['update_tag_template'] + + @property + def delete_tag_template(self) -> Callable[ + [datacatalog.DeleteTagTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag template method over gRPC. + + Deletes a tag template and all tags using the template. Users + should enable the Data Catalog API in the project identified by + the ``name`` parameter (see [Data Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteTagTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_tag_template' not in self._stubs: + self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate', + request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template'] + + @property + def create_tag_template_field(self) -> Callable[ + [datacatalog.CreateTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the create tag template field method over gRPC. + + Creates a field in a tag template. The user should enable the + Data Catalog API in the project identified by the ``parent`` + parameter (see `Data Catalog Resource + Project `__ + for more information). + + Returns: + Callable[[~.CreateTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag_template_field' not in self._stubs: + self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplateField', + request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['create_tag_template_field'] + + @property + def update_tag_template_field(self) -> Callable[ + [datacatalog.UpdateTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the update tag template field method over gRPC. + + Updates a field in a tag template. This method cannot be used to + update the field type. Users should enable the Data Catalog API + in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.UpdateTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag_template_field' not in self._stubs: + self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplateField', + request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['update_tag_template_field'] + + @property + def rename_tag_template_field(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the rename tag template field method over gRPC. + + Renames a field in a tag template. The user should enable the + Data Catalog API in the project identified by the ``name`` + parameter (see `Data Catalog Resource + Project `__ + for more information). + + Returns: + Callable[[~.RenameTagTemplateFieldRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field' not in self._stubs: + self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateField', + request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field'] + + @property + def rename_tag_template_field_enum_value(self) -> Callable[ + [datacatalog.RenameTagTemplateFieldEnumValueRequest], + Awaitable[tags.TagTemplateField]]: + r"""Return a callable for the rename tag template field enum + value method over gRPC. + + Renames an enum value in a tag template. The enum + values have to be unique within one enum field. Thus, an + enum value cannot be renamed with a name used in any + other enum value within the same enum field. + + Returns: + Callable[[~.RenameTagTemplateFieldEnumValueRequest], + Awaitable[~.TagTemplateField]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'rename_tag_template_field_enum_value' not in self._stubs: + self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateFieldEnumValue', + request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, + response_deserializer=tags.TagTemplateField.deserialize, + ) + return self._stubs['rename_tag_template_field_enum_value'] + + @property + def delete_tag_template_field(self) -> Callable[ + [datacatalog.DeleteTagTemplateFieldRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag template field method over gRPC. + + Deletes a field in a tag template and all uses of that field. + Users should enable the Data Catalog API in the project + identified by the ``name`` parameter (see [Data Catalog Resource + Project] + (https://cloud.google.com/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable[[~.DeleteTagTemplateFieldRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag_template_field' not in self._stubs: + self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField', + request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag_template_field'] + + @property + def create_tag(self) -> Callable[ + [datacatalog.CreateTagRequest], + Awaitable[tags.Tag]]: + r"""Return a callable for the create tag method over gRPC. + + Creates a tag on an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The + project identified by the ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. 
+ + Returns: + Callable[[~.CreateTagRequest], + Awaitable[~.Tag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_tag' not in self._stubs: + self._stubs['create_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTag', + request_serializer=datacatalog.CreateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['create_tag'] + + @property + def update_tag(self) -> Callable[ + [datacatalog.UpdateTagRequest], + Awaitable[tags.Tag]]: + r"""Return a callable for the update tag method over gRPC. + + Updates an existing tag. + + Returns: + Callable[[~.UpdateTagRequest], + Awaitable[~.Tag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_tag' not in self._stubs: + self._stubs['update_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTag', + request_serializer=datacatalog.UpdateTagRequest.serialize, + response_deserializer=tags.Tag.deserialize, + ) + return self._stubs['update_tag'] + + @property + def delete_tag(self) -> Callable[ + [datacatalog.DeleteTagRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tag method over gRPC. + + Deletes a tag. + + Returns: + Callable[[~.DeleteTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_tag' not in self._stubs: + self._stubs['delete_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag', + request_serializer=datacatalog.DeleteTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_tag'] + + @property + def list_tags(self) -> Callable[ + [datacatalog.ListTagsRequest], + Awaitable[datacatalog.ListTagsResponse]]: + r"""Return a callable for the list tags method over gRPC. + + Lists tags assigned to an + [Entry][google.cloud.datacatalog.v1beta1.Entry]. The + [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the + response are lowercased. + + Returns: + Callable[[~.ListTagsRequest], + Awaitable[~.ListTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_tags' not in self._stubs: + self._stubs['list_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/ListTags', + request_serializer=datacatalog.ListTagsRequest.serialize, + response_deserializer=datacatalog.ListTagsResponse.deserialize, + ) + return self._stubs['list_tags'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the access control policy for a resource. Replaces any + existing policy. Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on + tag templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on + entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on + entry groups. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. A ``NOT_FOUND`` + error is returned if the resource does not exist. An empty + policy is returned if the resource exists but does not have a + policy set on it. + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + Callers must have following Google IAM permission + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on + tag templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on + entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on + entry groups. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the caller's permissions on a resource. If the resource + does not exist, an empty set of permissions is returned (We + don't return a ``NOT_FOUND`` error). + + Supported resources are: + + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage + policies for BigQuery, Pub/Sub and any external Google Cloud + Platform resources synced to Data Catalog. + + A caller is not required to have Google IAM permission to make + this request. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'DataCatalogGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py new file mode 100644 index 000000000000..cde5f3a05abb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
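In application code these AsyncIO stubs are usually reached through the generated ``DataCatalogAsyncClient`` rather than called directly; the client wraps each stub with retry, timeout, and metadata handling. A rough usage sketch, assuming Application Default Credentials are available and using a placeholder BigQuery table path:

.. code-block:: python

    import asyncio

    from google.cloud import datacatalog_v1beta1

    async def lookup_bigquery_table() -> None:
        # Calls flow through the DataCatalogGrpcAsyncIOTransport defined above.
        client = datacatalog_v1beta1.DataCatalogAsyncClient()
        request = datacatalog_v1beta1.LookupEntryRequest(
            linked_resource=(
                "//bigquery.googleapis.com/projects/my-project"
                "/datasets/my_dataset/tables/my_table"
            ),
        )
        entry = await client.lookup_entry(request=request)
        print(entry.name)

    # asyncio.run(lookup_bigquery_table())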
+# +from .client import PolicyTagManagerClient +from .async_client import PolicyTagManagerAsyncClient + +__all__ = ( + 'PolicyTagManagerClient', + 'PolicyTagManagerAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py new file mode 100644 index 000000000000..e79b1a312ca2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py @@ -0,0 +1,1582 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport +from .client import PolicyTagManagerClient + + +class PolicyTagManagerAsyncClient: + """The policy tag manager API service allows clients to manage + their taxonomies and policy tags. 
+ """ + + _client: PolicyTagManagerClient + + DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT + + policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) + parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) + taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) + parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) + common_billing_account_path = staticmethod(PolicyTagManagerClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(PolicyTagManagerClient.parse_common_billing_account_path) + common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) + parse_common_folder_path = staticmethod(PolicyTagManagerClient.parse_common_folder_path) + common_organization_path = staticmethod(PolicyTagManagerClient.common_organization_path) + parse_common_organization_path = staticmethod(PolicyTagManagerClient.parse_common_organization_path) + common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) + parse_common_project_path = staticmethod(PolicyTagManagerClient.parse_common_project_path) + common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) + parse_common_location_path = staticmethod(PolicyTagManagerClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. + """ + return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerAsyncClient: The constructed client. + """ + return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PolicyTagManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PolicyTagManagerTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the policy tag manager client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PolicyTagManagerTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = PolicyTagManagerClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_taxonomy(self, + request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest, dict]]): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. + parent (:class:`str`): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (:class:`google.cloud.datacatalog_v1beta1.types.Taxonomy`): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.CreateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_taxonomy(self, + request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This operation will also delete + all policy tags in this taxonomy along with their + associated policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + await client.delete_taxonomy(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest, dict]]): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. + name (:class:`str`): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.DeleteTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_taxonomy(self, + request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, + *, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTaxonomyRequest( + ) + + # Make the request + response = await client.update_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest, dict]]): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (:class:`google.cloud.datacatalog_v1beta1.types.Taxonomy`): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.UpdateTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_taxonomy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("taxonomy.name", request.taxonomy.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_taxonomies(self,
+            request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListTaxonomiesAsyncPager:
+        r"""Lists all taxonomies in a project in a particular
+        location that the caller has permission to view.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            async def sample_list_taxonomies():
+                # Create a client
+                client = datacatalog_v1beta1.PolicyTagManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = datacatalog_v1beta1.ListTaxonomiesRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_taxonomies(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest, dict]]):
+                The request object. Request message for
+                [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies].
+            parent (:class:`str`):
+                Required. Resource name of the
+                project to list the taxonomies of.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager:
+                Response message for
+                [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = policytagmanager.ListTaxonomiesRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTaxonomiesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_taxonomy(self, + request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest, dict]]): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. + name (:class:`str`): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.GetTaxonomyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_taxonomy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_policy_tag(self, + request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in the specified taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest, dict]]): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. + parent (:class:`str`): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (:class:`google.cloud.datacatalog_v1beta1.types.PolicyTag`): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). 
PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.CreatePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_policy_tag(self, + request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. Also deletes all of its + descendant policy tags. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_policy_tag(request=request) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest, dict]]): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. + name (:class:`str`): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.DeletePolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def update_policy_tag(self, + request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, + *, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdatePolicyTagRequest( + ) + + # Make the request + response = await client.update_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest, dict]]): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (:class:`google.cloud.datacatalog_v1beta1.types.PolicyTag`): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+        has_flattened_params = any([policy_tag])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = policytagmanager.UpdatePolicyTagRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if policy_tag is not None:
+            request.policy_tag = policy_tag
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_policy_tag,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("policy_tag.name", request.policy_tag.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_policy_tags(self,
+            request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListPolicyTagsAsyncPager:
+        r"""Lists all policy tags in a taxonomy.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import datacatalog_v1beta1
+
+            async def sample_list_policy_tags():
+                # Create a client
+                client = datacatalog_v1beta1.PolicyTagManagerAsyncClient()
+
+                # Initialize request argument(s)
+                request = datacatalog_v1beta1.ListPolicyTagsRequest(
+                    parent="parent_value",
+                )
+
+                # Make the request
+                page_result = await client.list_policy_tags(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest, dict]]):
+                The request object. Request message for
+                [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags].
+            parent (:class:`str`):
+                Required. Resource name of the
+                taxonomy to list the policy tags of.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager:
+                Response message for
+                [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags].
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.ListPolicyTagsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_policy_tags, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPolicyTagsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_policy_tag(self, + request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = await client.get_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest, dict]]): + The request object. Request message for + [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. + name (:class:`str`): + Required. Resource name of the + requested policy tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = policytagmanager.GetPolicyTagRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_policy_tag, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a taxonomy or a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. 
A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a taxonomy or a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the permissions that a caller has on the + specified taxonomy or policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PolicyTagManagerAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py new file mode 100644 index 000000000000..c6b0980c4c39 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -0,0 +1,1796 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +class PolicyTagManagerClientMeta(type): + """Metaclass for the PolicyTagManager client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
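+
+    For example, a minimal sketch of how the registry below is used
+    (illustrative only; every name comes from this module):
+
+    .. code-block:: python
+
+        # With an explicit label the matching transport class is returned;
+        # with no label the first registered transport ("grpc") is used.
+        transport_cls = PolicyTagManagerClient.get_transport_class("grpc")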
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] + _transport_registry["grpc"] = PolicyTagManagerGrpcTransport + _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[PolicyTagManagerTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta): + """The policy tag manager API service allows clients to manage + their taxonomies and policy tags. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datacatalog.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PolicyTagManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PolicyTagManagerTransport: + """Returns the transport used by the client instance. + + Returns: + PolicyTagManagerTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def policy_tag_path(project: str,location: str,taxonomy: str,policy_tag: str,) -> str: + """Returns a fully-qualified policy_tag string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) + + @staticmethod + def parse_policy_tag_path(path: str) -> Dict[str,str]: + """Parses a policy_tag path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)/policyTags/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def taxonomy_path(project: str,location: str,taxonomy: str,) -> str: + """Returns a fully-qualified taxonomy string.""" + return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + + @staticmethod + def parse_taxonomy_path(path: str) -> Dict[str,str]: + """Parses a taxonomy path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, PolicyTagManagerTransport]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the policy tag manager client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, PolicyTagManagerTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerTransport): + # transport is a PolicyTagManagerTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_taxonomy(self, + request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, + *, + parent: Optional[str] = None, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Creates a taxonomy in the specified project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest, dict]): + The request object. Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. + parent (str): + Required. Resource name of the + project that the taxonomy will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): + The taxonomy to be created. + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, taxonomy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreateTaxonomyRequest): + request = policytagmanager.CreateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
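+ # By this point ``request`` is fully populated whether the caller passed a request
+ # object, a dict, or the flattened arguments. For illustration, a call like the
+ # following sketch (resource values are hypothetical; ``Taxonomy`` is assumed to be
+ # exported at the package level, as in the sample above) reaches this same path:
+ #     client.create_taxonomy(
+ #         parent="projects/my-project/locations/us",
+ #         taxonomy=datacatalog_v1beta1.Taxonomy(display_name="Sensitive data"),
+ #     )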
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_taxonomy(self, + request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a taxonomy. This operation will also delete + all policy tags in this taxonomy along with their + associated policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + client.delete_taxonomy(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest, dict]): + The request object. Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. + name (str): + Required. Resource name of the + taxonomy to be deleted. All policy tags + in this taxonomy will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeleteTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): + request = policytagmanager.DeleteTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
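+ # Nothing is returned here (the method is annotated ``-> None``), so the call
+ # below is not assigned. A flattened call such as
+ #     client.delete_taxonomy(name="projects/my-project/locations/us/taxonomies/123")
+ # (resource name hypothetical) behaves the same way and also removes every policy
+ # tag in the taxonomy, as the docstring above notes.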
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_taxonomy(self, + request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, + *, + taxonomy: Optional[policytagmanager.Taxonomy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Updates a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTaxonomyRequest( + ) + + # Make the request + response = client.update_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest, dict]): + The request object. Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. + taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): + The taxonomy to update. Only description, display_name, + and activated policy types can be updated. + + This corresponds to the ``taxonomy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([taxonomy]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdateTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): + request = policytagmanager.UpdateTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if taxonomy is not None: + request.taxonomy = taxonomy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("taxonomy.name", request.taxonomy.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_taxonomies(self, + request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTaxonomiesPager: + r"""Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest, dict]): + The request object. Request message for + [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. + parent (str): + Required. Resource name of the + project to list the taxonomies of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesPager: + Response message for + [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, policytagmanager.ListTaxonomiesRequest): + request = policytagmanager.ListTaxonomiesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTaxonomiesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_taxonomy(self, + request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.Taxonomy: + r"""Gets a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_taxonomy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest, dict]): + The request object. Request message for + [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. + name (str): + Required. Resource name of the + requested taxonomy. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.Taxonomy: + A taxonomy is a collection of policy tags that classify data along a common + axis. For instance a data *sensitivity* taxonomy + could contain policy tags denoting PII such as age, + zipcode, and SSN. A data *origin* taxonomy could + contain policy tags to distinguish user data, + employee data, partner data, public data. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetTaxonomyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetTaxonomyRequest): + request = policytagmanager.GetTaxonomyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_policy_tag(self, + request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, + *, + parent: Optional[str] = None, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Creates a policy tag in the specified taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest, dict]): + The request object. Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. + parent (str): + Required. Resource name of the + taxonomy that the policy tag will belong + to. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): + The policy tag to be created. + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. 
ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, policy_tag]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.CreatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.CreatePolicyTagRequest): + request = policytagmanager.CreatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_policy_tag(self, + request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a policy tag. Also deletes all of its + descendant policy tags. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + client.delete_policy_tag(request=request) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest, dict]): + The request object. Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. + name (str): + Required. Resource name of the policy + tag to be deleted. All of its descendant + policy tags will also be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.DeletePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.DeletePolicyTagRequest): + request = policytagmanager.DeletePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def update_policy_tag(self, + request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, + *, + policy_tag: Optional[policytagmanager.PolicyTag] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Updates a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdatePolicyTagRequest( + ) + + # Make the request + response = client.update_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest, dict]): + The request object. Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. + policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be + updated. + + This corresponds to the ``policy_tag`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([policy_tag]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.UpdatePolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): + request = policytagmanager.UpdatePolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if policy_tag is not None: + request.policy_tag = policy_tag + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("policy_tag.name", request.policy_tag.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_policy_tags(self, + request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPolicyTagsPager: + r"""Lists all policy tags in a taxonomy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest, dict]): + The request object. Request message for + [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. + parent (str): + Required. Resource name of the + taxonomy to list the policy tags of. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsPager: + Response message for + [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.ListPolicyTagsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.ListPolicyTagsRequest): + request = policytagmanager.ListPolicyTagsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPolicyTagsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_policy_tag(self, + request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanager.PolicyTag: + r"""Gets a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = client.get_policy_tag(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest, dict]): + The request object. 
Request message for + [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. + name (str): + Required. Resource name of the + requested policy tag. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.PolicyTag: + Denotes one policy tag in a taxonomy + (e.g. ssn). Policy Tags can be defined + in a hierarchy. For example, consider + the following hierarchy: + + Geolocation -> (LatLong, City, + ZipCode). PolicyTag "Geolocation" + contains three child policy tags: + "LatLong", "City", and "ZipCode". + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanager.GetPolicyTagRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanager.GetPolicyTagRequest): + request = policytagmanager.GetPolicyTagRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy(self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM policy for a taxonomy or a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
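+ # (The service still expects ``resource`` to be set, so in practice callers pass
+ # at least {"resource": "<taxonomy or policy tag name>"} or a populated
+ # GetIamPolicyRequest; the empty request is only constructed here as a fallback.)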
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy(self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM policy for a taxonomy or a policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions(self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Returns the permissions that a caller has on the + specified taxonomy or policy tag. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PolicyTagManagerClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py new file mode 100644 index 000000000000..c505e3c5ebbe --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.datacatalog_v1beta1.types import policytagmanager + + +class ListTaxonomiesPager: + """A pager for iterating through ``list_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``taxonomies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., policytagmanager.ListTaxonomiesResponse], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[policytagmanager.Taxonomy]: + for page in self.pages: + yield from page.taxonomies + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTaxonomiesAsyncPager: + """A pager for iterating through ``list_taxonomies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``taxonomies`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTaxonomies`` requests and continue to iterate + through the ``taxonomies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
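+
+ In typical use (a sketch; the pager is obtained from the async client counterpart
+ of this service), the result of ``await client.list_taxonomies(...)`` is consumed
+ with ``async for taxonomy in pager``, which drives the ``pages`` property below
+ behind the scenes.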
+ """ + def __init__(self, + method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]], + request: policytagmanager.ListTaxonomiesRequest, + response: policytagmanager.ListTaxonomiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = policytagmanager.ListTaxonomiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[policytagmanager.ListTaxonomiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[policytagmanager.Taxonomy]: + async def async_generator(): + async for page in self.pages: + for response in page.taxonomies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPolicyTagsPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., policytagmanager.ListPolicyTagsResponse], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[policytagmanager.PolicyTag]: + for page in self.pages: + yield from page.policy_tags + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListPolicyTagsAsyncPager: + """A pager for iterating through ``list_policy_tags`` requests. + + This class thinly wraps an initial + :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policy_tags`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPolicyTags`` requests and continue to iterate + through the ``policy_tags`` field on the + corresponding responses. + + All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], + request: policytagmanager.ListPolicyTagsRequest, + response: policytagmanager.ListPolicyTagsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest): + The initial request object. + response (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = policytagmanager.ListPolicyTagsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[policytagmanager.ListPolicyTagsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[policytagmanager.PolicyTag]: + async def async_generator(): + async for page in self.pages: + for response in page.policy_tags: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py new file mode 100644 index 000000000000..192f3e97b958 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerTransport +from .grpc import PolicyTagManagerGrpcTransport +from .grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] +_transport_registry['grpc'] = PolicyTagManagerGrpcTransport +_transport_registry['grpc_asyncio'] = PolicyTagManagerGrpcAsyncIOTransport + +__all__ = ( + 'PolicyTagManagerTransport', + 'PolicyTagManagerGrpcTransport', + 'PolicyTagManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py new file mode 100644 index 000000000000..c5142bbfcb5d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py @@ -0,0 +1,320 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PolicyTagManagerTransport(abc.ABC): + """Abstract transport class for PolicyTagManager.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_taxonomy: gapic_v1.method.wrap_method( + self.create_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.delete_taxonomy: gapic_v1.method.wrap_method( + self.delete_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.update_taxonomy: gapic_v1.method.wrap_method( + self.update_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.list_taxonomies: gapic_v1.method.wrap_method( + self.list_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.get_taxonomy: gapic_v1.method.wrap_method( + self.get_taxonomy, + default_timeout=None, + client_info=client_info, + ), + self.create_policy_tag: gapic_v1.method.wrap_method( + self.create_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.delete_policy_tag: gapic_v1.method.wrap_method( + self.delete_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.update_policy_tag: gapic_v1.method.wrap_method( + self.update_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.list_policy_tags: gapic_v1.method.wrap_method( + self.list_policy_tags, + default_timeout=None, + client_info=client_info, + ), + self.get_policy_tag: gapic_v1.method.wrap_method( + self.get_policy_tag, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Union[ + policytagmanager.ListTaxonomiesResponse, + Awaitable[policytagmanager.ListTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + Union[ + policytagmanager.Taxonomy, + Awaitable[policytagmanager.Taxonomy] + ]]: + raise NotImplementedError() + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Union[ + policytagmanager.ListPolicyTagsResponse, + Awaitable[policytagmanager.ListPolicyTagsResponse] + ]]: + raise NotImplementedError() + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + Union[ + policytagmanager.PolicyTag, + Awaitable[policytagmanager.PolicyTag] + ]]: + raise NotImplementedError() + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[ + policy_pb2.Policy, + Awaitable[policy_pb2.Policy] + ]]: + raise NotImplementedError() + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'PolicyTagManagerTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py new file mode 100644 index 000000000000..08165c6811a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py @@ -0,0 +1,586 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): + """gRPC backend transport for PolicyTagManager. + + The policy tag manager API service allows clients to manage + their taxonomies and policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in the specified project. + + Returns: + Callable[[~.CreateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_taxonomy' not in self._stubs: + self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreateTaxonomy', + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['create_taxonomy'] + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete taxonomy method over gRPC. + + Deletes a taxonomy. This operation will also delete + all policy tags in this taxonomy along with their + associated policies. 
+ + Returns: + Callable[[~.DeleteTaxonomyRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_taxonomy' not in self._stubs: + self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy', + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_taxonomy'] + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_taxonomy' not in self._stubs: + self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdateTaxonomy', + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['update_taxonomy'] + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + policytagmanager.ListTaxonomiesResponse]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + ~.ListTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_taxonomies' not in self._stubs: + self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListTaxonomies', + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs['list_taxonomies'] + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + policytagmanager.Taxonomy]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + ~.Taxonomy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_taxonomy' not in self._stubs: + self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetTaxonomy', + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['get_taxonomy'] + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in the specified taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_policy_tag' not in self._stubs: + self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreatePolicyTag', + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['create_policy_tag'] + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. Also deletes all of its + descendant policy tags. + + Returns: + Callable[[~.DeletePolicyTagRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_policy_tag' not in self._stubs: + self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag', + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_policy_tag'] + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_policy_tag' not in self._stubs: + self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdatePolicyTag', + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['update_policy_tag'] + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + policytagmanager.ListPolicyTagsResponse]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. 
+ + Returns: + Callable[[~.ListPolicyTagsRequest], + ~.ListPolicyTagsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_policy_tags' not in self._stubs: + self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListPolicyTags', + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs['list_policy_tags'] + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + policytagmanager.PolicyTag]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + ~.PolicyTag]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_policy_tag' not in self._stubs: + self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetPolicyTag', + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['get_policy_tag'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a taxonomy or a policy tag. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a taxonomy or a policy tag. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on the + specified taxonomy or policy tag. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'PolicyTagManagerGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b6616acc6c20 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerGrpcTransport + + +class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): + """gRPC AsyncIO backend transport for PolicyTagManager. + + The policy tag manager API service allows clients to manage + their taxonomies and policy tags. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_taxonomy(self) -> Callable[ + [policytagmanager.CreateTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the create taxonomy method over gRPC. + + Creates a taxonomy in the specified project. + + Returns: + Callable[[~.CreateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_taxonomy' not in self._stubs: + self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreateTaxonomy', + request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['create_taxonomy'] + + @property + def delete_taxonomy(self) -> Callable[ + [policytagmanager.DeleteTaxonomyRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete taxonomy method over gRPC. 
+ + Deletes a taxonomy. This operation will also delete + all policy tags in this taxonomy along with their + associated policies. + + Returns: + Callable[[~.DeleteTaxonomyRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_taxonomy' not in self._stubs: + self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy', + request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_taxonomy'] + + @property + def update_taxonomy(self) -> Callable[ + [policytagmanager.UpdateTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the update taxonomy method over gRPC. + + Updates a taxonomy. + + Returns: + Callable[[~.UpdateTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_taxonomy' not in self._stubs: + self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdateTaxonomy', + request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['update_taxonomy'] + + @property + def list_taxonomies(self) -> Callable[ + [policytagmanager.ListTaxonomiesRequest], + Awaitable[policytagmanager.ListTaxonomiesResponse]]: + r"""Return a callable for the list taxonomies method over gRPC. + + Lists all taxonomies in a project in a particular + location that the caller has permission to view. + + Returns: + Callable[[~.ListTaxonomiesRequest], + Awaitable[~.ListTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_taxonomies' not in self._stubs: + self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListTaxonomies', + request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, + response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, + ) + return self._stubs['list_taxonomies'] + + @property + def get_taxonomy(self) -> Callable[ + [policytagmanager.GetTaxonomyRequest], + Awaitable[policytagmanager.Taxonomy]]: + r"""Return a callable for the get taxonomy method over gRPC. + + Gets a taxonomy. + + Returns: + Callable[[~.GetTaxonomyRequest], + Awaitable[~.Taxonomy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_taxonomy' not in self._stubs: + self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetTaxonomy', + request_serializer=policytagmanager.GetTaxonomyRequest.serialize, + response_deserializer=policytagmanager.Taxonomy.deserialize, + ) + return self._stubs['get_taxonomy'] + + @property + def create_policy_tag(self) -> Callable[ + [policytagmanager.CreatePolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the create policy tag method over gRPC. + + Creates a policy tag in the specified taxonomy. + + Returns: + Callable[[~.CreatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_policy_tag' not in self._stubs: + self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreatePolicyTag', + request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['create_policy_tag'] + + @property + def delete_policy_tag(self) -> Callable[ + [policytagmanager.DeletePolicyTagRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete policy tag method over gRPC. + + Deletes a policy tag. Also deletes all of its + descendant policy tags. + + Returns: + Callable[[~.DeletePolicyTagRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_policy_tag' not in self._stubs: + self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag', + request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_policy_tag'] + + @property + def update_policy_tag(self) -> Callable[ + [policytagmanager.UpdatePolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the update policy tag method over gRPC. + + Updates a policy tag. + + Returns: + Callable[[~.UpdatePolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_policy_tag' not in self._stubs: + self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdatePolicyTag', + request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['update_policy_tag'] + + @property + def list_policy_tags(self) -> Callable[ + [policytagmanager.ListPolicyTagsRequest], + Awaitable[policytagmanager.ListPolicyTagsResponse]]: + r"""Return a callable for the list policy tags method over gRPC. + + Lists all policy tags in a taxonomy. 
+ + Returns: + Callable[[~.ListPolicyTagsRequest], + Awaitable[~.ListPolicyTagsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_policy_tags' not in self._stubs: + self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListPolicyTags', + request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, + response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, + ) + return self._stubs['list_policy_tags'] + + @property + def get_policy_tag(self) -> Callable[ + [policytagmanager.GetPolicyTagRequest], + Awaitable[policytagmanager.PolicyTag]]: + r"""Return a callable for the get policy tag method over gRPC. + + Gets a policy tag. + + Returns: + Callable[[~.GetPolicyTagRequest], + Awaitable[~.PolicyTag]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_policy_tag' not in self._stubs: + self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetPolicyTag', + request_serializer=policytagmanager.GetPolicyTagRequest.serialize, + response_deserializer=policytagmanager.PolicyTag.deserialize, + ) + return self._stubs['get_policy_tag'] + + @property + def get_iam_policy(self) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the IAM policy for a taxonomy or a policy tag. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_iam_policy' not in self._stubs: + self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy', + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['get_iam_policy'] + + @property + def set_iam_policy(self) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets the IAM policy for a taxonomy or a policy tag. + + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
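+        # Note: the IAM methods (``get_iam_policy``, ``set_iam_policy``,
+        # ``test_iam_permissions``) use the raw protobuf classes from
+        # ``iam_policy_pb2``/``policy_pb2``, so they serialize via
+        # ``SerializeToString``/``FromString`` instead of the proto-plus
+        # ``serialize``/``deserialize`` used by the taxonomy stubs above.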
+ if 'set_iam_policy' not in self._stubs: + self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy', + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs['set_iam_policy'] + + @property + def test_iam_permissions(self) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: + r"""Return a callable for the test iam permissions method over gRPC. + + Returns the permissions that a caller has on the + specified taxonomy or policy tag. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'test_iam_permissions' not in self._stubs: + self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions', + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs['test_iam_permissions'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'PolicyTagManagerGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py new file mode 100644 index 000000000000..0592b8ffb549 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import PolicyTagManagerSerializationClient +from .async_client import PolicyTagManagerSerializationAsyncClient + +__all__ = ( + 'PolicyTagManagerSerializationClient', + 'PolicyTagManagerSerializationAsyncClient', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py new file mode 100644 index 000000000000..e84999231ee4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py @@ -0,0 +1,380 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport +from .client import PolicyTagManagerSerializationClient + + +class PolicyTagManagerSerializationAsyncClient: + """Policy tag manager serialization API service allows clients + to manipulate their taxonomies and policy tags data with + serialized format. 
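+
+    This client exposes two RPCs, ``ImportTaxonomies`` and ``ExportTaxonomies``,
+    which bulk-import and bulk-export taxonomies together with their nested
+    policy tags.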
+    """
+
+    _client: PolicyTagManagerSerializationClient
+
+    DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT
+
+    taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path)
+    parse_taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.parse_taxonomy_path)
+    common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(PolicyTagManagerSerializationClient.common_folder_path)
+    parse_common_folder_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_folder_path)
+    common_organization_path = staticmethod(PolicyTagManagerSerializationClient.common_organization_path)
+    parse_common_organization_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_organization_path)
+    common_project_path = staticmethod(PolicyTagManagerSerializationClient.common_project_path)
+    parse_common_project_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_project_path)
+    common_location_path = staticmethod(PolicyTagManagerSerializationClient.common_location_path)
+    parse_common_location_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerSerializationAsyncClient: The constructed client.
+        """
+        return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerSerializationAsyncClient: The constructed client.
+        """
+        return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return PolicyTagManagerSerializationClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> PolicyTagManagerSerializationTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PolicyTagManagerSerializationTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(type(PolicyTagManagerSerializationClient).get_transport_class, type(PolicyTagManagerSerializationClient))
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the policy tag manager serialization client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
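+
+        A minimal construction sketch (illustrative, not generated output; most
+        applications can simply rely on the defaults):
+
+        .. code-block:: python
+
+            from google.api_core.client_options import ClientOptions
+            from google.cloud import datacatalog_v1beta1
+
+            # Default construction picks up credentials from the environment.
+            client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient()
+
+            # Optionally pin the endpoint explicitly via ``client_options``.
+            client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient(
+                client_options=ClientOptions(api_endpoint="datacatalog.googleapis.com"),
+            )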
+ """ + self._client = PolicyTagManagerSerializationClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def import_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Imports all taxonomies and their policy tags to a + project as new taxonomies. + + This method provides a bulk taxonomy / policy tag + creation using nested proto structure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1beta1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1beta1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = await client.import_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest, dict]]): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def export_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports all taxonomies and their policy tags in a + project. + This method generates SerializedTaxonomy protos with + nested policy tags that can be used as an input for + future ImportTaxonomies calls. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + async def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = await client.export_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest, dict]]): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_taxonomies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PolicyTagManagerSerializationAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerSerializationAsyncClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py new file mode 100644 index 000000000000..1ca908e0b535 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -0,0 +1,590 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PolicyTagManagerSerializationGrpcTransport +from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +class PolicyTagManagerSerializationClientMeta(type): + """Metaclass for the PolicyTagManagerSerialization client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[PolicyTagManagerSerializationTransport]]
+    _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport
+    _transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[PolicyTagManagerSerializationTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PolicyTagManagerSerializationClient(metaclass=PolicyTagManagerSerializationClientMeta):
+    """Policy tag manager serialization API service allows clients
+    to manipulate their taxonomies and policy tags data with
+    serialized format.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerSerializationClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PolicyTagManagerSerializationClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PolicyTagManagerSerializationTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PolicyTagManagerSerializationTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def taxonomy_path(project: str,location: str,taxonomy: str,) -> str:
+        """Returns a fully-qualified taxonomy string."""
+        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, )
+
+    @staticmethod
+    def parse_taxonomy_path(path: str) -> Dict[str,str]:
+        """Parses a taxonomy path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Optional[Union[str, PolicyTagManagerSerializationTransport]] = None,
+            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the policy tag manager serialization client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, PolicyTagManagerSerializationTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PolicyTagManagerSerializationTransport): + # transport is a PolicyTagManagerSerializationTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def import_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ImportTaxonomiesResponse: + r"""Imports all taxonomies and their policy tags to a + project as new taxonomies. + + This method provides a bulk taxonomy / policy tag + creation using nested proto structure. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_import_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + inline_source = datacatalog_v1beta1.InlineSource() + inline_source.taxonomies.display_name = "display_name_value" + + request = datacatalog_v1beta1.ImportTaxonomiesRequest( + inline_source=inline_source, + parent="parent_value", + ) + + # Make the request + response = client.import_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest, dict]): + The request object. Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse: + Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ImportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanagerserialization.ImportTaxonomiesRequest): + request = policytagmanagerserialization.ImportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def export_taxonomies(self, + request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policytagmanagerserialization.ExportTaxonomiesResponse: + r"""Exports all taxonomies and their policy tags in a + project. + This method generates SerializedTaxonomy protos with + nested policy tags that can be used as an input for + future ImportTaxonomies calls. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1beta1 + + def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = client.export_taxonomies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest, dict]): + The request object. Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse: + Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a policytagmanagerserialization.ExportTaxonomiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, policytagmanagerserialization.ExportTaxonomiesRequest): + request = policytagmanagerserialization.ExportTaxonomiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PolicyTagManagerSerializationClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
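+            A simple way to satisfy this is to give each ``with`` block its own
+            client, e.g. ``with PolicyTagManagerSerializationClient() as client: ...``,
+            so that the transport is owned and closed by exactly one client.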
+ """ + self.transport.close() + + + + + + + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "PolicyTagManagerSerializationClient", +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py new file mode 100644 index 000000000000..faf2990e5837 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PolicyTagManagerSerializationTransport +from .grpc import PolicyTagManagerSerializationGrpcTransport +from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] +_transport_registry['grpc'] = PolicyTagManagerSerializationGrpcTransport +_transport_registry['grpc_asyncio'] = PolicyTagManagerSerializationGrpcAsyncIOTransport + +__all__ = ( + 'PolicyTagManagerSerializationTransport', + 'PolicyTagManagerSerializationGrpcTransport', + 'PolicyTagManagerSerializationGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py new file mode 100644 index 000000000000..650cfc4dd3d0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.datacatalog_v1beta1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class PolicyTagManagerSerializationTransport(abc.ABC): + """Abstract transport class for PolicyTagManagerSerialization.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'datacatalog.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
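+            # (The GDCH audience below is therefore only applied to credentials
+            # obtained via ``google.auth.default()``, not to credentials loaded from a
+            # user-supplied ``credentials_file``.)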
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.import_taxonomies: gapic_v1.method.wrap_method( + self.import_taxonomies, + default_timeout=None, + client_info=client_info, + ), + self.export_taxonomies: gapic_v1.method.wrap_method( + self.export_taxonomies, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ImportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Union[ + policytagmanagerserialization.ExportTaxonomiesResponse, + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'PolicyTagManagerSerializationTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py new file mode 100644 index 000000000000..17c718d72ec0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py @@ -0,0 +1,303 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO + + +class PolicyTagManagerSerializationGrpcTransport(PolicyTagManagerSerializationTransport): + """gRPC backend transport for PolicyTagManagerSerialization. + + Policy tag manager serialization API service allows clients + to manipulate their taxonomies and policy tags data with + serialized format. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. 
+ credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + policytagmanagerserialization.ImportTaxonomiesResponse]: + r"""Return a callable for the import taxonomies method over gRPC. + + Imports all taxonomies and their policy tags to a + project as new taxonomies. + + This method provides a bulk taxonomy / policy tag + creation using nested proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + ~.ImportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_taxonomies' not in self._stubs: + self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ImportTaxonomies', + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs['import_taxonomies'] + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + policytagmanagerserialization.ExportTaxonomiesResponse]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports all taxonomies and their policy tags in a + project. + This method generates SerializedTaxonomy protos with + nested policy tags that can be used as an input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + ~.ExportTaxonomiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
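+        # The stub is created lazily on first access and cached in
+        # ``self._stubs``; later accesses to this property reuse the same
+        # callable instead of registering the RPC on the channel again.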
+ if 'export_taxonomies' not in self._stubs: + self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ExportTaxonomies', + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs['export_taxonomies'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'PolicyTagManagerSerializationGrpcTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e2ca1f84ad53 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO +from .grpc import PolicyTagManagerSerializationGrpcTransport + + +class PolicyTagManagerSerializationGrpcAsyncIOTransport(PolicyTagManagerSerializationTransport): + """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. + + Policy tag manager serialization API service allows clients + to manipulate their taxonomies and policy tags data with + serialized format. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
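+
+    In typical use this transport is not constructed directly; a minimal
+    sketch of the more common path, assuming the generated async client is
+    named ``PolicyTagManagerSerializationAsyncClient``, is::
+
+        client = PolicyTagManagerSerializationAsyncClient(
+            transport="grpc_asyncio",
+        )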
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'datacatalog.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def import_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ImportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse]]: + r"""Return a callable for the import taxonomies method over gRPC. + + Imports all taxonomies and their policy tags to a + project as new taxonomies. + + This method provides a bulk taxonomy / policy tag + creation using nested proto structure. + + Returns: + Callable[[~.ImportTaxonomiesRequest], + Awaitable[~.ImportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_taxonomies' not in self._stubs: + self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ImportTaxonomies', + request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, + ) + return self._stubs['import_taxonomies'] + + @property + def export_taxonomies(self) -> Callable[ + [policytagmanagerserialization.ExportTaxonomiesRequest], + Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse]]: + r"""Return a callable for the export taxonomies method over gRPC. + + Exports all taxonomies and their policy tags in a + project. + This method generates SerializedTaxonomy protos with + nested policy tags that can be used as an input for + future ImportTaxonomies calls. + + Returns: + Callable[[~.ExportTaxonomiesRequest], + Awaitable[~.ExportTaxonomiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
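+        # As in the synchronous transport, the stub is created once and cached
+        # in ``self._stubs``. The callable returned here produces an awaitable
+        # call object, so callers must ``await`` the RPC result.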
+ if 'export_taxonomies' not in self._stubs: + self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( + '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ExportTaxonomies', + request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, + response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, + ) + return self._stubs['export_taxonomies'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'PolicyTagManagerSerializationGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py new file mode 100644 index 000000000000..c895c656f715 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .datacatalog import ( + CreateEntryGroupRequest, + CreateEntryRequest, + CreateTagRequest, + CreateTagTemplateFieldRequest, + CreateTagTemplateRequest, + DeleteEntryGroupRequest, + DeleteEntryRequest, + DeleteTagRequest, + DeleteTagTemplateFieldRequest, + DeleteTagTemplateRequest, + Entry, + EntryGroup, + GetEntryGroupRequest, + GetEntryRequest, + GetTagTemplateRequest, + ListEntriesRequest, + ListEntriesResponse, + ListEntryGroupsRequest, + ListEntryGroupsResponse, + ListTagsRequest, + ListTagsResponse, + LookupEntryRequest, + RenameTagTemplateFieldEnumValueRequest, + RenameTagTemplateFieldRequest, + SearchCatalogRequest, + SearchCatalogResponse, + UpdateEntryGroupRequest, + UpdateEntryRequest, + UpdateTagRequest, + UpdateTagTemplateFieldRequest, + UpdateTagTemplateRequest, + EntryType, +) +from .gcs_fileset_spec import ( + GcsFilesetSpec, + GcsFileSpec, +) +from .policytagmanager import ( + CreatePolicyTagRequest, + CreateTaxonomyRequest, + DeletePolicyTagRequest, + DeleteTaxonomyRequest, + GetPolicyTagRequest, + GetTaxonomyRequest, + ListPolicyTagsRequest, + ListPolicyTagsResponse, + ListTaxonomiesRequest, + ListTaxonomiesResponse, + PolicyTag, + Taxonomy, + UpdatePolicyTagRequest, + UpdateTaxonomyRequest, +) +from .policytagmanagerserialization import ( + ExportTaxonomiesRequest, + ExportTaxonomiesResponse, + ImportTaxonomiesRequest, + ImportTaxonomiesResponse, + InlineSource, + SerializedPolicyTag, + SerializedTaxonomy, +) +from .schema import ( + ColumnSchema, + Schema, +) +from .search import ( + SearchCatalogResult, + SearchResultType, +) +from .table_spec import ( + BigQueryDateShardedSpec, + BigQueryTableSpec, + TableSpec, + ViewSpec, + TableSourceType, +) +from .tags import ( + FieldType, + Tag, + TagField, + TagTemplate, + TagTemplateField, +) +from .timestamps import ( + SystemTimestamps, +) +from .usage import ( + UsageSignal, + UsageStats, +) + +__all__ = ( + 'IntegratedSystem', + 'ManagingSystem', + 'CreateEntryGroupRequest', 
+ 'CreateEntryRequest', + 'CreateTagRequest', + 'CreateTagTemplateFieldRequest', + 'CreateTagTemplateRequest', + 'DeleteEntryGroupRequest', + 'DeleteEntryRequest', + 'DeleteTagRequest', + 'DeleteTagTemplateFieldRequest', + 'DeleteTagTemplateRequest', + 'Entry', + 'EntryGroup', + 'GetEntryGroupRequest', + 'GetEntryRequest', + 'GetTagTemplateRequest', + 'ListEntriesRequest', + 'ListEntriesResponse', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'ListTagsRequest', + 'ListTagsResponse', + 'LookupEntryRequest', + 'RenameTagTemplateFieldEnumValueRequest', + 'RenameTagTemplateFieldRequest', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'UpdateEntryGroupRequest', + 'UpdateEntryRequest', + 'UpdateTagRequest', + 'UpdateTagTemplateFieldRequest', + 'UpdateTagTemplateRequest', + 'EntryType', + 'GcsFilesetSpec', + 'GcsFileSpec', + 'CreatePolicyTagRequest', + 'CreateTaxonomyRequest', + 'DeletePolicyTagRequest', + 'DeleteTaxonomyRequest', + 'GetPolicyTagRequest', + 'GetTaxonomyRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'PolicyTag', + 'Taxonomy', + 'UpdatePolicyTagRequest', + 'UpdateTaxonomyRequest', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + 'ImportTaxonomiesRequest', + 'ImportTaxonomiesResponse', + 'InlineSource', + 'SerializedPolicyTag', + 'SerializedTaxonomy', + 'ColumnSchema', + 'Schema', + 'SearchCatalogResult', + 'SearchResultType', + 'BigQueryDateShardedSpec', + 'BigQueryTableSpec', + 'TableSpec', + 'ViewSpec', + 'TableSourceType', + 'FieldType', + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'SystemTimestamps', + 'UsageSignal', + 'UsageStats', +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py new file mode 100644 index 000000000000..2ba43f9f577f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'IntegratedSystem', + 'ManagingSystem', + }, +) + + +class IntegratedSystem(proto.Enum): + r"""This enum describes all the possible systems that Data + Catalog integrates with. + + Values: + INTEGRATED_SYSTEM_UNSPECIFIED (0): + Default unknown system. + BIGQUERY (1): + BigQuery. + CLOUD_PUBSUB (2): + Cloud Pub/Sub. + """ + INTEGRATED_SYSTEM_UNSPECIFIED = 0 + BIGQUERY = 1 + CLOUD_PUBSUB = 2 + + +class ManagingSystem(proto.Enum): + r"""This enum describes all the systems that manage + Taxonomy and PolicyTag resources in DataCatalog. 
+ + Values: + MANAGING_SYSTEM_UNSPECIFIED (0): + Default value + MANAGING_SYSTEM_DATAPLEX (1): + Dataplex. + MANAGING_SYSTEM_OTHER (2): + Other + """ + MANAGING_SYSTEM_UNSPECIFIED = 0 + MANAGING_SYSTEM_DATAPLEX = 1 + MANAGING_SYSTEM_OTHER = 2 + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py new file mode 100644 index 000000000000..28f0c21feae7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py @@ -0,0 +1,1363 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec as gcd_gcs_fileset_spec +from google.cloud.datacatalog_v1beta1.types import schema as gcd_schema +from google.cloud.datacatalog_v1beta1.types import search +from google.cloud.datacatalog_v1beta1.types import table_spec +from google.cloud.datacatalog_v1beta1.types import tags as gcd_tags +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.cloud.datacatalog_v1beta1.types import usage +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'EntryType', + 'SearchCatalogRequest', + 'SearchCatalogResponse', + 'CreateEntryGroupRequest', + 'UpdateEntryGroupRequest', + 'GetEntryGroupRequest', + 'DeleteEntryGroupRequest', + 'ListEntryGroupsRequest', + 'ListEntryGroupsResponse', + 'CreateEntryRequest', + 'UpdateEntryRequest', + 'DeleteEntryRequest', + 'GetEntryRequest', + 'LookupEntryRequest', + 'Entry', + 'EntryGroup', + 'CreateTagTemplateRequest', + 'GetTagTemplateRequest', + 'UpdateTagTemplateRequest', + 'DeleteTagTemplateRequest', + 'CreateTagRequest', + 'UpdateTagRequest', + 'DeleteTagRequest', + 'CreateTagTemplateFieldRequest', + 'UpdateTagTemplateFieldRequest', + 'RenameTagTemplateFieldRequest', + 'RenameTagTemplateFieldEnumValueRequest', + 'DeleteTagTemplateFieldRequest', + 'ListTagsRequest', + 'ListTagsResponse', + 'ListEntriesRequest', + 'ListEntriesResponse', + }, +) + + +class EntryType(proto.Enum): + r"""Entry resources in Data Catalog can be of different types e.g. a + BigQuery Table entry is of type ``TABLE``. This enum describes all + the possible types Data Catalog contains. + + Values: + ENTRY_TYPE_UNSPECIFIED (0): + Default unknown type. + TABLE (2): + Output only. The type of entry that has a + GoogleSQL schema, including logical views. + MODEL (5): + Output only. The type of models. + https://cloud.google.com/bigquery-ml/docs/bigqueryml-intro + DATA_STREAM (3): + Output only. 
An entry type which is used for + streaming entries. Example: Pub/Sub topic. + FILESET (4): + An entry type which is a set of files or + objects. Example: Cloud Storage fileset. + """ + ENTRY_TYPE_UNSPECIFIED = 0 + TABLE = 2 + MODEL = 5 + DATA_STREAM = 3 + FILESET = 4 + + +class SearchCatalogRequest(proto.Message): + r"""Request message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + + Attributes: + scope (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope): + Required. The scope of this search request. A ``scope`` that + has empty ``include_org_ids``, ``include_project_ids`` AND + false ``include_gcp_public_datasets`` is considered invalid. + Data Catalog will return an error in such a case. + query (str): + Optional. The query string in search query syntax. An empty + query string will result in all data assets (in the + specified scope) that the user has access to. Query strings + can be simple as "x" or more qualified as: + + - name:x + - column:x + - description:y + + Note: Query tokens need to have a minimum of 3 characters + for substring matching to work correctly. See `Data Catalog + Search + Syntax `__ + for more information. + page_size (int): + Number of results in the search page. If <=0 then defaults + to 10. Max limit for page_size is 1000. Throws an invalid + argument for page_size > 1000. + page_token (str): + Optional. Pagination token returned in an earlier + [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.SearchCatalogResponse.next_page_token], + which indicates that this is a continuation of a prior + [SearchCatalogRequest][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] + call, and that the system should return the next page of + data. If empty, the first page is returned. + order_by (str): + Specifies the ordering of results, currently supported + case-sensitive choices are: + + - ``relevance``, only supports descending + - ``last_modified_timestamp [asc|desc]``, defaults to + descending if not specified + - ``default`` that can only be descending + + If not specified, defaults to ``relevance`` descending. + """ + + class Scope(proto.Message): + r"""The criteria that select the subspace used for query + matching. + + Attributes: + include_org_ids (MutableSequence[str]): + The list of organization IDs to search + within. To find your organization ID, follow + instructions in + https://cloud.google.com/resource-manager/docs/creating-managing-organization. + include_project_ids (MutableSequence[str]): + The list of project IDs to search within. To + learn more about the distinction between project + names/IDs/numbers, go to + https://cloud.google.com/docs/overview/#projects. + include_gcp_public_datasets (bool): + If ``true``, include Google Cloud public datasets in the + search results. Info on Google Cloud public datasets is + available at https://cloud.google.com/public-datasets/. By + default, Google Cloud public datasets are excluded. + restricted_locations (MutableSequence[str]): + Optional. The list of locations to search within. + + 1. If empty, search will be performed in all locations; + 2. If any of the locations are NOT in the valid locations + list, error will be returned; + 3. Otherwise, search only the given locations for matching + results. Typical usage is to leave this field empty. 
When + a location is unreachable as returned in the + ``SearchCatalogResponse.unreachable`` field, users can + repeat the search request with this parameter set to get + additional information on the error. + + Valid locations: + + - asia-east1 + - asia-east2 + - asia-northeast1 + - asia-northeast2 + - asia-northeast3 + - asia-south1 + - asia-southeast1 + - australia-southeast1 + - eu + - europe-north1 + - europe-west1 + - europe-west2 + - europe-west3 + - europe-west4 + - europe-west6 + - global + - northamerica-northeast1 + - southamerica-east1 + - us + - us-central1 + - us-east1 + - us-east4 + - us-west1 + - us-west2 + """ + + include_org_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + include_project_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + include_gcp_public_datasets: bool = proto.Field( + proto.BOOL, + number=7, + ) + restricted_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + + scope: Scope = proto.Field( + proto.MESSAGE, + number=6, + message=Scope, + ) + query: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class SearchCatalogResponse(proto.Message): + r"""Response message for + [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. + + Attributes: + results (MutableSequence[google.cloud.datacatalog_v1beta1.types.SearchCatalogResult]): + Search results. + total_size (int): + The approximate total number of entries + matched by the query. + next_page_token (str): + The token that can be used to retrieve the + next page of results. + unreachable (MutableSequence[str]): + Unreachable locations. Search result does not include data + from those locations. Users can get additional information + on the error by repeating the search request with a more + restrictive parameter -- setting the value for + ``SearchDataCatalogRequest.scope.restricted_locations``. + """ + + @property + def raw_page(self): + return self + + results: MutableSequence[search.SearchCatalogResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=search.SearchCatalogResult, + ) + total_size: int = proto.Field( + proto.INT32, + number=2, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class CreateEntryGroupRequest(proto.Message): + r"""Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. + + Attributes: + parent (str): + Required. The name of the project this entry group is in. + Example: + + - projects/{project_id}/locations/{location} + + Note that this EntryGroup and its child resources may not + actually be stored in the location in this name. + entry_group_id (str): + Required. The id of the entry group to + create. The id must begin with a letter or + underscore, contain only English letters, + numbers and underscores, and be at most 64 + characters. + entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): + The entry group to create. Defaults to an + empty entry group. 
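+
+    Example:
+        A minimal sketch of building this request with the generated
+        proto-plus class; the resource name and IDs below are placeholders::
+
+            request = CreateEntryGroupRequest(
+                parent="projects/my-project/locations/us-central1",
+                entry_group_id="my_entry_group",
+                entry_group=EntryGroup(display_name="My entry group"),
+            )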
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_group_id: str = proto.Field( + proto.STRING, + number=3, + ) + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=2, + message='EntryGroup', + ) + + +class UpdateEntryGroupRequest(proto.Message): + r"""Request message for + [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. + + Attributes: + entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): + Required. The updated entry group. "name" + field must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on + an entry group. + If this parameter is absent or empty, all + modifiable fields are overwritten. If such + fields are non-required and omitted in the + request body, their values are emptied. + """ + + entry_group: 'EntryGroup' = proto.Field( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetEntryGroupRequest(proto.Message): + r"""Request message for + [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. + + Attributes: + name (str): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return. If not set or empty, + all fields are returned. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEntryGroupRequest(proto.Message): + r"""Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. + + Attributes: + name (str): + Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + force (bool): + Optional. If true, deletes all entries in the + entry group. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListEntryGroupsRequest(proto.Message): + r"""Request message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + + Attributes: + parent (str): + Required. The name of the location that contains the entry + groups, which can be provided in URL format. Example: + + - projects/{project_id}/locations/{location} + page_size (int): + Optional. The maximum number of items to return. Default is + 10. Max limit is 1000. Throws an invalid argument for + ``page_size > 1000``. + page_token (str): + Optional. Token that specifies which page is + requested. If empty, the first page is returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListEntryGroupsResponse(proto.Message): + r"""Response message for + [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. + + Attributes: + entry_groups (MutableSequence[google.cloud.datacatalog_v1beta1.types.EntryGroup]): + EntryGroup details. + next_page_token (str): + Token to retrieve the next page of results. + It is set to empty if no items remain in + results. 
+ """ + + @property + def raw_page(self): + return self + + entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='EntryGroup', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateEntryRequest(proto.Message): + r"""Request message for + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. + + Attributes: + parent (str): + Required. The name of the entry group this entry is in. + Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + Note that this Entry and its child resources may not + actually be stored in the location in this name. + entry_id (str): + Required. The id of the entry to create. + entry (google.cloud.datacatalog_v1beta1.types.Entry): + Required. The entry to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_id: str = proto.Field( + proto.STRING, + number=3, + ) + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=2, + message='Entry', + ) + + +class UpdateEntryRequest(proto.Message): + r"""Request message for + [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. + + Attributes: + entry (google.cloud.datacatalog_v1beta1.types.Entry): + Required. The updated entry. The "name" field + must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on an entry. + + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied. + + The following fields are modifiable: + + - For entries with type ``DATA_STREAM``: + + - ``schema`` + + - For entries with type ``FILESET``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` + + - For entries with ``user_specified_type``: + + - ``schema`` + - ``display_name`` + - ``description`` + - ``user_specified_type`` + - ``user_specified_system`` + - ``linked_resource`` + - ``source_system_timestamps`` + """ + + entry: 'Entry' = proto.Field( + proto.MESSAGE, + number=1, + message='Entry', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteEntryRequest(proto.Message): + r"""Request message for + [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. + + Attributes: + name (str): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEntryRequest(proto.Message): + r"""Request message for + [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. + + Attributes: + name (str): + Required. The name of the entry. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LookupEntryRequest(proto.Message): + r"""Request message for + [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + linked_resource (str): + The full name of the Google Cloud Platform resource the Data + Catalog entry represents. See: + https://cloud.google.com/apis/design/resource_names#full_resource_name. + Full names are case-sensitive. + + Examples: + + - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + - //pubsub.googleapis.com/projects/projectId/topics/topicId + + This field is a member of `oneof`_ ``target_name``. + sql_resource (str): + The SQL name of the entry. SQL names are case-sensitive. + + Examples: + + - ``pubsub.project_id.topic_id`` + - :literal:`pubsub.project_id.`topic.id.with.dots\`` + - ``bigquery.table.project_id.dataset_id.table_id`` + - ``bigquery.dataset.project_id.dataset_id`` + - ``datacatalog.entry.project_id.location_id.entry_group_id.entry_id`` + + ``*_id``\ s should satisfy the standard SQL rules for + identifiers. + https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. + + This field is a member of `oneof`_ ``target_name``. + """ + + linked_resource: str = proto.Field( + proto.STRING, + number=1, + oneof='target_name', + ) + sql_resource: str = proto.Field( + proto.STRING, + number=3, + oneof='target_name', + ) + + +class Entry(proto.Message): + r"""Entry Metadata. A Data Catalog Entry resource represents another + resource in Google Cloud Platform (such as a BigQuery dataset or a + Pub/Sub topic), or outside of Google Cloud Platform. Clients can use + the ``linked_resource`` field in the Entry resource to refer to the + original resource ID of the source system. + + An Entry resource contains resource details, such as its schema. An + Entry can also be used to attach flexible metadata, such as a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The Data Catalog resource name of the entry in + URL format. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + Note that this Entry and its child resources may not + actually be stored in the location in this name. + linked_resource (str): + The resource this metadata entry refers to. + + For Google Cloud Platform resources, ``linked_resource`` is + the `full name of the + resource `__. + For example, the ``linked_resource`` for a table resource + from BigQuery is: + + - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + + Output only when Entry is of type in the EntryType enum. For + entries with user_specified_type, this field is optional and + defaults to an empty string. + type_ (google.cloud.datacatalog_v1beta1.types.EntryType): + The type of the entry. + Only used for Entries with types in the + EntryType enum. + + This field is a member of `oneof`_ ``entry_type``. + user_specified_type (str): + Entry type if it does not fit any of the input-allowed + values listed in ``EntryType`` enum above. When creating an + entry, users should check the enum values first, if nothing + matches the entry to be created, then provide a custom + value, for example "my_special_type". 
+ ``user_specified_type`` strings must begin with a letter or + underscore and can only contain letters, numbers, and + underscores; are case insensitive; must be at least 1 + character and at most 64 characters long. + + Currently, only FILESET enum value is allowed. All other + entries created through Data Catalog must use + ``user_specified_type``. + + This field is a member of `oneof`_ ``entry_type``. + integrated_system (google.cloud.datacatalog_v1beta1.types.IntegratedSystem): + Output only. This field indicates the entry's + source system that Data Catalog integrates with, + such as BigQuery or Pub/Sub. + + This field is a member of `oneof`_ ``system``. + user_specified_system (str): + This field indicates the entry's source system that Data + Catalog does not integrate with. ``user_specified_system`` + strings must begin with a letter or underscore and can only + contain letters, numbers, and underscores; are case + insensitive; must be at least 1 character and at most 64 + characters long. + + This field is a member of `oneof`_ ``system``. + gcs_fileset_spec (google.cloud.datacatalog_v1beta1.types.GcsFilesetSpec): + Specification that applies to a Cloud Storage + fileset. This is only valid on entries of type + FILESET. + + This field is a member of `oneof`_ ``type_spec``. + bigquery_table_spec (google.cloud.datacatalog_v1beta1.types.BigQueryTableSpec): + Specification that applies to a BigQuery table. This is only + valid on entries of type ``TABLE``. + + This field is a member of `oneof`_ ``type_spec``. + bigquery_date_sharded_spec (google.cloud.datacatalog_v1beta1.types.BigQueryDateShardedSpec): + Specification for a group of BigQuery tables with name + pattern ``[prefix]YYYYMMDD``. Context: + https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. + + This field is a member of `oneof`_ ``type_spec``. + display_name (str): + Display information such as title and + description. A short name to identify the entry, + for example, "Analytics Data - Jan 2011". + Default value is an empty string. + description (str): + Entry description, which can consist of + several sentences or paragraphs that describe + entry contents. Default value is an empty + string. + schema (google.cloud.datacatalog_v1beta1.types.Schema): + Schema of the entry. An entry might not have + any schema attached to it. + source_system_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): + Output only. Timestamps about the underlying resource, not + about this Data Catalog entry. Output only when Entry is of + type in the EntryType enum. For entries with + user_specified_type, this field is optional and defaults to + an empty timestamp. + usage_signal (google.cloud.datacatalog_v1beta1.types.UsageSignal): + Output only. Statistics on the usage level of + the resource. 
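+
+    Example:
+        A sketch of the oneof behaviour described above: ``type_`` and
+        ``user_specified_type`` share the ``entry_type`` oneof, so setting one
+        clears the other (the values are placeholders)::
+
+            entry = Entry(type_=EntryType.FILESET)
+            entry.user_specified_type = "my_special_type"
+            # ``entry.type_`` now reads as ENTRY_TYPE_UNSPECIFIED because both
+            # fields belong to the ``entry_type`` oneof.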
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=9, + ) + type_: 'EntryType' = proto.Field( + proto.ENUM, + number=2, + oneof='entry_type', + enum='EntryType', + ) + user_specified_type: str = proto.Field( + proto.STRING, + number=16, + oneof='entry_type', + ) + integrated_system: common.IntegratedSystem = proto.Field( + proto.ENUM, + number=17, + oneof='system', + enum=common.IntegratedSystem, + ) + user_specified_system: str = proto.Field( + proto.STRING, + number=18, + oneof='system', + ) + gcs_fileset_spec: gcd_gcs_fileset_spec.GcsFilesetSpec = proto.Field( + proto.MESSAGE, + number=6, + oneof='type_spec', + message=gcd_gcs_fileset_spec.GcsFilesetSpec, + ) + bigquery_table_spec: table_spec.BigQueryTableSpec = proto.Field( + proto.MESSAGE, + number=12, + oneof='type_spec', + message=table_spec.BigQueryTableSpec, + ) + bigquery_date_sharded_spec: table_spec.BigQueryDateShardedSpec = proto.Field( + proto.MESSAGE, + number=15, + oneof='type_spec', + message=table_spec.BigQueryDateShardedSpec, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + schema: gcd_schema.Schema = proto.Field( + proto.MESSAGE, + number=5, + message=gcd_schema.Schema, + ) + source_system_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=7, + message=timestamps.SystemTimestamps, + ) + usage_signal: usage.UsageSignal = proto.Field( + proto.MESSAGE, + number=13, + message=usage.UsageSignal, + ) + + +class EntryGroup(proto.Message): + r"""EntryGroup Metadata. An EntryGroup resource represents a logical + grouping of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. + + Attributes: + name (str): + The resource name of the entry group in URL format. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + + Note that this EntryGroup and its child resources may not + actually be stored in the location in this name. + display_name (str): + A short name to identify the entry group, for + example, "analytics data - jan 2011". Default + value is an empty string. + description (str): + Entry group description, which can consist of + several sentences or paragraphs that describe + entry group contents. Default value is an empty + string. + data_catalog_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): + Output only. Timestamps about this + EntryGroup. Default value is empty timestamps. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + data_catalog_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=4, + message=timestamps.SystemTimestamps, + ) + + +class CreateTagTemplateRequest(proto.Message): + r"""Request message for + [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. + + Attributes: + parent (str): + Required. The name of the project and the template location + [region](https://cloud.google.com/data-catalog/docs/concepts/regions. + + Example: + + - projects/{project_id}/locations/us-central1 + tag_template_id (str): + Required. The id of the tag template to + create. + tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): + Required. The tag template to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_id: str = proto.Field( + proto.STRING, + number=3, + ) + tag_template: gcd_tags.TagTemplate = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.TagTemplate, + ) + + +class GetTagTemplateRequest(proto.Message): + r"""Request message for + [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. + + Attributes: + name (str): + Required. The name of the tag template. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTagTemplateRequest(proto.Message): + r"""Request message for + [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. + + Attributes: + tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): + Required. The template to update. The "name" + field must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Names of fields whose values to overwrite on a tag template. + Currently, only ``display_name`` can be overwritten. + + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. + """ + + tag_template: gcd_tags.TagTemplate = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_tags.TagTemplate, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTagTemplateRequest(proto.Message): + r"""Request message for + [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. + + Attributes: + name (str): + Required. The name of the tag template to delete. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + force (bool): + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of any possible tags + using this template. ``force = false`` will be supported in + the future. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class CreateTagRequest(proto.Message): + r"""Request message for + [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. + + Attributes: + parent (str): + Required. The name of the resource to attach this tag to. + Tags can be attached to Entries. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + + Note that this Tag and its child resources may not actually + be stored in the location in this name. + tag (google.cloud.datacatalog_v1beta1.types.Tag): + Required. The tag to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag: gcd_tags.Tag = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.Tag, + ) + + +class UpdateTagRequest(proto.Message): + r"""Request message for + [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. + + Attributes: + tag (google.cloud.datacatalog_v1beta1.types.Tag): + Required. The updated tag. The "name" field + must be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Note: Currently, this parameter can only take ``"fields"`` + as value. + + Names of fields whose values to overwrite on a tag. + Currently, a tag has the only modifiable field with the name + ``fields``. 
+ + In general, if this parameter is absent or empty, all + modifiable fields are overwritten. If such fields are + non-required and omitted in the request body, their values + are emptied. + """ + + tag: gcd_tags.Tag = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_tags.Tag, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTagRequest(proto.Message): + r"""Request message for + [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. + + Attributes: + name (str): + Required. The name of the tag to delete. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateTagTemplateFieldRequest(proto.Message): + r"""Request message for + [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField]. + + Attributes: + parent (str): + Required. The name of the project and the template location + `region `__. + + Example: + + - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id} + tag_template_field_id (str): + Required. The ID of the tag template field to create. Field + ids can contain letters (both uppercase and lowercase), + numbers (0-9), underscores (_) and dashes (-). Field IDs + must be at least 1 character long and at most 128 characters + long. Field IDs must also be unique within their template. + tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): + Required. The tag template field to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_field_id: str = proto.Field( + proto.STRING, + number=2, + ) + tag_template_field: gcd_tags.TagTemplateField = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_tags.TagTemplateField, + ) + + +class UpdateTagTemplateFieldRequest(proto.Message): + r"""Request message for + [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template field. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): + Required. The template to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Names of fields whose values to overwrite on an + individual field of a tag template. The following fields are + modifiable: + + - ``display_name`` + - ``type.enum_type`` + - ``is_required`` + + If this parameter is absent or empty, all modifiable fields + are overwritten. If such fields are non-required and omitted + in the request body, their values are emptied with one + exception: when updating an enum type, the provided values + are merged with the existing values. Therefore, enum values + can only be added, existing enum values cannot be deleted or + renamed. + + Additionally, updating a template field from optional to + required is *not* allowed. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_field: gcd_tags.TagTemplateField = proto.Field( + proto.MESSAGE, + number=2, + message=gcd_tags.TagTemplateField, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class RenameTagTemplateFieldRequest(proto.Message): + r"""Request message for + [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + new_tag_template_field_id (str): + Required. The new ID of this tag template field. For + example, ``my_new_field``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_tag_template_field_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RenameTagTemplateFieldEnumValueRequest(proto.Message): + r"""Request message for + [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. + + Attributes: + name (str): + Required. The name of the enum field value. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} + new_enum_value_display_name (str): + Required. The new display name of the enum value. For + example, ``my_new_enum_value``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + new_enum_value_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteTagTemplateFieldRequest(proto.Message): + r"""Request message for + [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. + + Attributes: + name (str): + Required. The name of the tag template field to delete. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + force (bool): + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of this field from any + tags using this field. ``force = false`` will be supported + in the future. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListTagsRequest(proto.Message): + r"""Request message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. + + Attributes: + parent (str): + Required. The name of the Data Catalog resource to list the + tags of. The resource could be an + [Entry][google.cloud.datacatalog.v1beta1.Entry] or an + [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. + + Examples: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + page_size (int): + The maximum number of tags to return. Default + is 10. Max limit is 1000. + page_token (str): + Token that specifies which page is requested. + If empty, the first page is returned. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTagsResponse(proto.Message): + r"""Response message for + [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. 
+ + Attributes: + tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.Tag]): + [Tag][google.cloud.datacatalog.v1beta1.Tag] details. + next_page_token (str): + Token to retrieve the next page of results. + It is set to empty if no items remain in + results. + """ + + @property + def raw_page(self): + return self + + tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcd_tags.Tag, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListEntriesRequest(proto.Message): + r"""Request message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + + Attributes: + parent (str): + Required. The name of the entry group that contains the + entries, which can be provided in URL format. Example: + + - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + page_size (int): + The maximum number of items to return. Default is 10. Max + limit is 1000. Throws an invalid argument for + ``page_size > 1000``. + page_token (str): + Token that specifies which page is requested. + If empty, the first page is returned. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to return for each Entry. If not set or empty, + all fields are returned. For example, setting read_mask to + contain only one path "name" will cause ListEntries to + return a list of Entries with only "name" field. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class ListEntriesResponse(proto.Message): + r"""Response message for + [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. + + Attributes: + entries (MutableSequence[google.cloud.datacatalog_v1beta1.types.Entry]): + Entry details. + next_page_token (str): + Token to retrieve the next page of results. + It is set to empty if no items remain in + results. + """ + + @property + def raw_page(self): + return self + + entries: MutableSequence['Entry'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Entry', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py new file mode 100644 index 000000000000..9918d07ef189 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
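+#
+# Example (illustrative sketch, not generated code): the request messages in
+# types/datacatalog.py above are proto-plus messages and can be built with
+# keyword arguments. The project, location, and entry-group IDs below are
+# placeholders, not values taken from this patch.
+#
+#     from google.cloud.datacatalog_v1beta1 import types
+#     from google.protobuf import field_mask_pb2
+#
+#     request = types.ListEntriesRequest(
+#         parent="projects/my-project/locations/us-central1/entryGroups/my_group",
+#         page_size=100,
+#         # Return only the "name" field of each Entry.
+#         read_mask=field_mask_pb2.FieldMask(paths=["name"]),
+#     )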
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1beta1.types import timestamps + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'GcsFilesetSpec', + 'GcsFileSpec', + }, +) + + +class GcsFilesetSpec(proto.Message): + r"""Describes a Cloud Storage fileset entry. + + Attributes: + file_patterns (MutableSequence[str]): + Required. Patterns to identify a set of files in Google + Cloud Storage. See `Cloud Storage + documentation `__ + for more information. Note that bucket wildcards are + currently not supported. + + Examples of valid file_patterns: + + - ``gs://bucket_name/dir/*``: matches all files within + ``bucket_name/dir`` directory. + - ``gs://bucket_name/dir/**``: matches all files in + ``bucket_name/dir`` spanning all subdirectories. + - ``gs://bucket_name/file*``: matches files prefixed by + ``file`` in ``bucket_name`` + - ``gs://bucket_name/??.txt``: matches files with two + characters followed by ``.txt`` in ``bucket_name`` + - ``gs://bucket_name/[aeiou].txt``: matches files that + contain a single vowel character followed by ``.txt`` in + ``bucket_name`` + - ``gs://bucket_name/[a-m].txt``: matches files that + contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` + in ``bucket_name`` + - ``gs://bucket_name/a/*/b``: matches all files in + ``bucket_name`` that match ``a/*/b`` pattern, such as + ``a/c/b``, ``a/d/b`` + - ``gs://another_bucket/a.txt``: matches + ``gs://another_bucket/a.txt`` + + You can combine wildcards to provide more powerful matches, + for example: + + - ``gs://bucket_name/[a-m]??.j*g`` + sample_gcs_file_specs (MutableSequence[google.cloud.datacatalog_v1beta1.types.GcsFileSpec]): + Output only. Sample files contained in this + fileset, not all files contained in this fileset + are represented here. + """ + + file_patterns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + sample_gcs_file_specs: MutableSequence['GcsFileSpec'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='GcsFileSpec', + ) + + +class GcsFileSpec(proto.Message): + r"""Specifications of a single file in Cloud Storage. + + Attributes: + file_path (str): + Required. The full file path. Example: + ``gs://bucket_name/a/b.txt``. + gcs_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): + Output only. Timestamps about the Cloud + Storage file. + size_bytes (int): + Output only. The size of the file, in bytes. + """ + + file_path: str = proto.Field( + proto.STRING, + number=1, + ) + gcs_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=2, + message=timestamps.SystemTimestamps, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py new file mode 100644 index 000000000000..112b99a8a486 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py @@ -0,0 +1,520 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
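+#
+# Example (illustrative sketch, not generated code): the GcsFilesetSpec
+# message in types/gcs_fileset_spec.py above takes its file patterns as a
+# plain list of strings; the bucket name and patterns below are placeholders.
+#
+#     from google.cloud.datacatalog_v1beta1 import types
+#
+#     fileset_spec = types.GcsFilesetSpec(
+#         file_patterns=[
+#             "gs://my-bucket/reports/*",    # files directly under reports/
+#             "gs://my-bucket/archive/**",   # files under archive/ and its subdirectories
+#         ],
+#     )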
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'Taxonomy', + 'PolicyTag', + 'CreateTaxonomyRequest', + 'DeleteTaxonomyRequest', + 'UpdateTaxonomyRequest', + 'ListTaxonomiesRequest', + 'ListTaxonomiesResponse', + 'GetTaxonomyRequest', + 'CreatePolicyTagRequest', + 'DeletePolicyTagRequest', + 'UpdatePolicyTagRequest', + 'ListPolicyTagsRequest', + 'ListPolicyTagsResponse', + 'GetPolicyTagRequest', + }, +) + + +class Taxonomy(proto.Message): + r"""A taxonomy is a collection of policy tags that classify data along a + common axis. For instance a data *sensitivity* taxonomy could + contain policy tags denoting PII such as age, zipcode, and SSN. A + data *origin* taxonomy could contain policy tags to distinguish user + data, employee data, partner data, public data. + + Attributes: + name (str): + Output only. Resource name of this taxonomy, whose format + is: + "projects/{project_number}/locations/{location_id}/taxonomies/{id}". + display_name (str): + Required. User defined name of this taxonomy. + It must: contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + + The taxonomy display name must be unique within + an organization. + description (str): + Optional. Description of this taxonomy. It + must: contain only unicode characters, tabs, + newlines, carriage returns and page breaks; and + be at most 2000 bytes long when encoded in + UTF-8. If not set, defaults to an empty + description. + policy_tag_count (int): + Output only. Number of policy tags contained + in this taxonomy. + taxonomy_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): + Output only. Timestamps about this taxonomy. Only + create_time and update_time are used. + activated_policy_types (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy.PolicyType]): + Optional. A list of policy types that are + activated for this taxonomy. If not set, + defaults to an empty list. + service (google.cloud.datacatalog_v1beta1.types.Taxonomy.Service): + Output only. Identity of the service which + owns the Taxonomy. This field is only populated + when the taxonomy is created by a Google Cloud + service. Currently only 'DATAPLEX' is supported. + """ + class PolicyType(proto.Enum): + r"""Defines policy types where policy tag can be used for. + + Values: + POLICY_TYPE_UNSPECIFIED (0): + Unspecified policy type. + FINE_GRAINED_ACCESS_CONTROL (1): + Fine grained access control policy, which + enables access control on tagged resources. + """ + POLICY_TYPE_UNSPECIFIED = 0 + FINE_GRAINED_ACCESS_CONTROL = 1 + + class Service(proto.Message): + r"""The source system of the Taxonomy. 
+ + Attributes: + name (google.cloud.datacatalog_v1beta1.types.ManagingSystem): + The Google Cloud service name. + identity (str): + The service agent for the service. + """ + + name: common.ManagingSystem = proto.Field( + proto.ENUM, + number=1, + enum=common.ManagingSystem, + ) + identity: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + policy_tag_count: int = proto.Field( + proto.INT32, + number=4, + ) + taxonomy_timestamps: timestamps.SystemTimestamps = proto.Field( + proto.MESSAGE, + number=5, + message=timestamps.SystemTimestamps, + ) + activated_policy_types: MutableSequence[PolicyType] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=PolicyType, + ) + service: Service = proto.Field( + proto.MESSAGE, + number=7, + message=Service, + ) + + +class PolicyTag(proto.Message): + r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags + can be defined in a hierarchy. For example, consider the + following hierarchy: + + Geolocation -> (LatLong, City, ZipCode). PolicyTag + "Geolocation" contains three child policy tags: "LatLong", + "City", and "ZipCode". + + Attributes: + name (str): + Output only. Resource name of this policy tag, whose format + is: + "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{id}". + display_name (str): + Required. User defined name of this policy + tag. It must: be unique within the parent + taxonomy; contain only unicode letters, numbers, + underscores, dashes and spaces; not start or end + with spaces; and be at most 200 bytes long when + encoded in UTF-8. + description (str): + Description of this policy tag. It must: + contain only unicode characters, tabs, newlines, + carriage returns and page breaks; and be at most + 2000 bytes long when encoded in UTF-8. If not + set, defaults to an empty description. If not + set, defaults to an empty description. + parent_policy_tag (str): + Resource name of this policy tag's parent + policy tag (e.g. for the "LatLong" policy tag in + the example above, this field contains the + resource name of the "Geolocation" policy tag). + If empty, it means this policy tag is a top + level policy tag (e.g. this field is empty for + the "Geolocation" policy tag in the example + above). If not set, defaults to an empty string. + child_policy_tags (MutableSequence[str]): + Output only. Resource names of child policy + tags of this policy tag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + parent_policy_tag: str = proto.Field( + proto.STRING, + number=4, + ) + child_policy_tags: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class CreateTaxonomyRequest(proto.Message): + r"""Request message for + [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. + + Attributes: + parent (str): + Required. Resource name of the project that + the taxonomy will belong to. + taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): + The taxonomy to be created. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomy: 'Taxonomy' = proto.Field( + proto.MESSAGE, + number=2, + message='Taxonomy', + ) + + +class DeleteTaxonomyRequest(proto.Message): + r"""Request message for + [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the taxonomy to be + deleted. All policy tags in this taxonomy will + also be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTaxonomyRequest(proto.Message): + r"""Request message for + [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. + + Attributes: + taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): + The taxonomy to update. Only description, display_name, and + activated policy types can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. For the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + taxonomy: 'Taxonomy' = proto.Field( + proto.MESSAGE, + number=1, + message='Taxonomy', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListTaxonomiesRequest(proto.Message): + r"""Request message for + [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. + + Attributes: + parent (str): + Required. Resource name of the project to + list the taxonomies of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous list + request, if any. If not set, defaults to an empty string. + filter (str): + Supported field for filter is 'service' and + value is 'dataplex'. Eg: service=dataplex. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTaxonomiesResponse(proto.Message): + r"""Response message for + [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy]): + Taxonomies that the project contains. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + taxonomies: MutableSequence['Taxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Taxonomy', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTaxonomyRequest(proto.Message): + r"""Request message for + [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. + + Attributes: + name (str): + Required. Resource name of the requested + taxonomy. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreatePolicyTagRequest(proto.Message): + r"""Request message for + [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. + + Attributes: + parent (str): + Required. 
Resource name of the taxonomy that + the policy tag will belong to. + policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): + The policy tag to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + policy_tag: 'PolicyTag' = proto.Field( + proto.MESSAGE, + number=2, + message='PolicyTag', + ) + + +class DeletePolicyTagRequest(proto.Message): + r"""Request message for + [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. + + Attributes: + name (str): + Required. Resource name of the policy tag to + be deleted. All of its descendant policy tags + will also be deleted. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdatePolicyTagRequest(proto.Message): + r"""Request message for + [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. + + Attributes: + policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): + The policy tag to update. Only the description, + display_name, and parent_policy_tag fields can be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The update mask applies to the resource. Only display_name, + description and parent_policy_tag can be updated and thus + can be listed in the mask. If update_mask is not provided, + all allowed fields (i.e. display_name, description and + parent) will be updated. For more information including the + ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + If not set, defaults to all of the fields that are allowed + to update. + """ + + policy_tag: 'PolicyTag' = proto.Field( + proto.MESSAGE, + number=1, + message='PolicyTag', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListPolicyTagsRequest(proto.Message): + r"""Request message for + [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. + + Attributes: + parent (str): + Required. Resource name of the taxonomy to + list the policy tags of. + page_size (int): + The maximum number of items to return. Must + be a value between 1 and 1000. If not set, + defaults to 50. + page_token (str): + The next_page_token value returned from a previous List + request, if any. If not set, defaults to an empty string. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListPolicyTagsResponse(proto.Message): + r"""Response message for + [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. + + Attributes: + policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.PolicyTag]): + The policy tags that are in the requested + taxonomy. + next_page_token (str): + Token used to retrieve the next page of + results, or empty if there are no more results + in the list. + """ + + @property + def raw_page(self): + return self + + policy_tags: MutableSequence['PolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='PolicyTag', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetPolicyTagRequest(proto.Message): + r"""Request message for + [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. + + Attributes: + name (str): + Required. Resource name of the requested + policy tag. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py new file mode 100644 index 000000000000..2ea1f5bc52ab --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.datacatalog_v1beta1.types import policytagmanager + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'SerializedTaxonomy', + 'SerializedPolicyTag', + 'ImportTaxonomiesRequest', + 'InlineSource', + 'ImportTaxonomiesResponse', + 'ExportTaxonomiesRequest', + 'ExportTaxonomiesResponse', + }, +) + + +class SerializedTaxonomy(proto.Message): + r"""Message capturing a taxonomy and its policy tag hierarchy as + a nested proto. Used for taxonomy import/export and mutation. + + Attributes: + display_name (str): + Required. Display name of the taxonomy. Max + 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized taxonomy. The + length of the description is limited to 2000 + bytes when encoded in UTF-8. If not set, + defaults to an empty description. + policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedPolicyTag]): + Top level policy tags associated with the + taxonomy if any. + activated_policy_types (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy.PolicyType]): + A list of policy types that are activated for + a taxonomy. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='SerializedPolicyTag', + ) + activated_policy_types: MutableSequence[policytagmanager.Taxonomy.PolicyType] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=policytagmanager.Taxonomy.PolicyType, + ) + + +class SerializedPolicyTag(proto.Message): + r"""Message representing one policy tag when exported as a nested + proto. + + Attributes: + policy_tag (str): + Resource name of the policy tag. + + This field will be ignored when calling + ImportTaxonomies. + display_name (str): + Required. Display name of the policy tag. Max + 200 bytes when encoded in UTF-8. + description (str): + Description of the serialized policy tag. The + length of the description is limited to 2000 + bytes when encoded in UTF-8. If not set, + defaults to an empty description. 
+ child_policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedPolicyTag]): + Children of the policy tag if any. + """ + + policy_tag: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + child_policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='SerializedPolicyTag', + ) + + +class ImportTaxonomiesRequest(proto.Message): + r"""Request message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Resource name of project that the + imported taxonomies will belong to. + inline_source (google.cloud.datacatalog_v1beta1.types.InlineSource): + Inline source used for taxonomies to be + imported. + + This field is a member of `oneof`_ ``source``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + inline_source: 'InlineSource' = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message='InlineSource', + ) + + +class InlineSource(proto.Message): + r"""Inline source used for taxonomies import. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedTaxonomy]): + Required. Taxonomies to be imported. + """ + + taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SerializedTaxonomy', + ) + + +class ImportTaxonomiesResponse(proto.Message): + r"""Response message for + [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy]): + Taxonomies that were imported. + """ + + taxonomies: MutableSequence[policytagmanager.Taxonomy] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=policytagmanager.Taxonomy, + ) + + +class ExportTaxonomiesRequest(proto.Message): + r"""Request message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. Resource name of the project that + taxonomies to be exported will share. + taxonomies (MutableSequence[str]): + Required. Resource names of the taxonomies to + be exported. + serialized_taxonomies (bool): + Export taxonomies as serialized taxonomies. + + This field is a member of `oneof`_ ``destination``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + taxonomies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + serialized_taxonomies: bool = proto.Field( + proto.BOOL, + number=3, + oneof='destination', + ) + + +class ExportTaxonomiesResponse(proto.Message): + r"""Response message for + [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. + + Attributes: + taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedTaxonomy]): + List of taxonomies and policy tags in a tree + structure. 
+ """ + + taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SerializedTaxonomy', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py new file mode 100644 index 000000000000..152d45255fc8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'Schema', + 'ColumnSchema', + }, +) + + +class Schema(proto.Message): + r"""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). + + Attributes: + columns (MutableSequence[google.cloud.datacatalog_v1beta1.types.ColumnSchema]): + Required. Schema of columns. A maximum of + 10,000 columns and sub-columns can be specified. + """ + + columns: MutableSequence['ColumnSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ColumnSchema', + ) + + +class ColumnSchema(proto.Message): + r"""Representation of a column within a schema. Columns could be + nested inside other columns. + + Attributes: + column (str): + Required. Name of the column. + type_ (str): + Required. Type of the column. + description (str): + Optional. Description of the column. Default + value is an empty string. + mode (str): + Optional. A column's mode indicates whether the values in + this column are required, nullable, etc. Only ``NULLABLE``, + ``REQUIRED`` and ``REPEATED`` are supported. Default mode is + ``NULLABLE``. + subcolumns (MutableSequence[google.cloud.datacatalog_v1beta1.types.ColumnSchema]): + Optional. Schema of sub-columns. A column can + have zero or more sub-columns. 
+ """ + + column: str = proto.Field( + proto.STRING, + number=6, + ) + type_: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + subcolumns: MutableSequence['ColumnSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ColumnSchema', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py new file mode 100644 index 000000000000..46c4f17e7c4b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'SearchResultType', + 'SearchCatalogResult', + }, +) + + +class SearchResultType(proto.Enum): + r"""The different types of resources that can be returned in + search. + + Values: + SEARCH_RESULT_TYPE_UNSPECIFIED (0): + Default unknown type. + ENTRY (1): + An [Entry][google.cloud.datacatalog.v1beta1.Entry]. + TAG_TEMPLATE (2): + A + [TagTemplate][google.cloud.datacatalog.v1beta1.TagTemplate]. + ENTRY_GROUP (3): + An + [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. + """ + SEARCH_RESULT_TYPE_UNSPECIFIED = 0 + ENTRY = 1 + TAG_TEMPLATE = 2 + ENTRY_GROUP = 3 + + +class SearchCatalogResult(proto.Message): + r"""A result that appears in the response of a search request. + Each result captures details of one entry that matches the + search. + + Attributes: + search_result_type (google.cloud.datacatalog_v1beta1.types.SearchResultType): + Type of the search result. This field can be + used to determine which Get method to call to + fetch the full resource. + search_result_subtype (str): + Sub-type of the search result. This is a dot-delimited + description of the resource's full type, and is the same as + the value callers would provide in the "type" search facet. + Examples: ``entry.table``, ``entry.dataStream``, + ``tagTemplate``. + relative_resource_name (str): + The relative resource name of the resource in URL format. + Examples: + + - ``projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` + - ``projects/{project_id}/tagTemplates/{tag_template_id}`` + linked_resource (str): + The full name of the cloud resource the entry belongs to. + See: + https://cloud.google.com/apis/design/resource_names#full_resource_name. 
+ Example: + + - ``//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId`` + modify_time (google.protobuf.timestamp_pb2.Timestamp): + Last-modified timestamp of the entry from the + managing system. + """ + + search_result_type: 'SearchResultType' = proto.Field( + proto.ENUM, + number=1, + enum='SearchResultType', + ) + search_result_subtype: str = proto.Field( + proto.STRING, + number=2, + ) + relative_resource_name: str = proto.Field( + proto.STRING, + number=3, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=4, + ) + modify_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py new file mode 100644 index 000000000000..5eadf5fd9317 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'TableSourceType', + 'BigQueryTableSpec', + 'ViewSpec', + 'TableSpec', + 'BigQueryDateShardedSpec', + }, +) + + +class TableSourceType(proto.Enum): + r"""Table source type. + + Values: + TABLE_SOURCE_TYPE_UNSPECIFIED (0): + Default unknown type. + BIGQUERY_VIEW (2): + Table view. + BIGQUERY_TABLE (5): + BigQuery native table. + BIGQUERY_MATERIALIZED_VIEW (7): + BigQuery materialized view. + """ + TABLE_SOURCE_TYPE_UNSPECIFIED = 0 + BIGQUERY_VIEW = 2 + BIGQUERY_TABLE = 5 + BIGQUERY_MATERIALIZED_VIEW = 7 + + +class BigQueryTableSpec(proto.Message): + r"""Describes a BigQuery table. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + table_source_type (google.cloud.datacatalog_v1beta1.types.TableSourceType): + Output only. The table source type. + view_spec (google.cloud.datacatalog_v1beta1.types.ViewSpec): + Table view specification. This field should only be + populated if ``table_source_type`` is ``BIGQUERY_VIEW``. + + This field is a member of `oneof`_ ``type_spec``. + table_spec (google.cloud.datacatalog_v1beta1.types.TableSpec): + Spec of a BigQuery table. This field should only be + populated if ``table_source_type`` is ``BIGQUERY_TABLE``. + + This field is a member of `oneof`_ ``type_spec``. 
+ """ + + table_source_type: 'TableSourceType' = proto.Field( + proto.ENUM, + number=1, + enum='TableSourceType', + ) + view_spec: 'ViewSpec' = proto.Field( + proto.MESSAGE, + number=2, + oneof='type_spec', + message='ViewSpec', + ) + table_spec: 'TableSpec' = proto.Field( + proto.MESSAGE, + number=3, + oneof='type_spec', + message='TableSpec', + ) + + +class ViewSpec(proto.Message): + r"""Table view specification. + + Attributes: + view_query (str): + Output only. The query that defines the table + view. + """ + + view_query: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TableSpec(proto.Message): + r"""Normal BigQuery table spec. + + Attributes: + grouped_entry (str): + Output only. If the table is a dated shard, i.e., with name + pattern ``[prefix]YYYYMMDD``, ``grouped_entry`` is the Data + Catalog resource name of the date sharded grouped entry, for + example, + ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``. + Otherwise, ``grouped_entry`` is empty. + """ + + grouped_entry: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryDateShardedSpec(proto.Message): + r"""Spec for a group of BigQuery tables with name pattern + ``[prefix]YYYYMMDD``. Context: + https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding + + Attributes: + dataset (str): + Output only. The Data Catalog resource name of the dataset + entry the current table belongs to, for example, + ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``. + table_prefix (str): + Output only. The table name prefix of the shards. The name + of any given shard is ``[table_prefix]YYYYMMDD``, for + example, for shard ``MyTable20180101``, the ``table_prefix`` + is ``MyTable``. + shard_count (int): + Output only. Total number of shards. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + table_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + shard_count: int = proto.Field( + proto.INT64, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py new file mode 100644 index 000000000000..b9d13daa19a4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py @@ -0,0 +1,407 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'Tag', + 'TagField', + 'TagTemplate', + 'TagTemplateField', + 'FieldType', + }, +) + + +class Tag(proto.Message): + r"""Tags are used to attach custom metadata to Data Catalog resources. + Tags conform to the specifications within their tag template. + + See `Data Catalog + IAM `__ for + information on the permissions needed to create or view tags. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the tag in URL format. Example: + + - projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + + where ``tag_id`` is a system-generated identifier. Note that + this Tag may not actually be stored in the location in this + name. + template (str): + Required. The resource name of the tag template that this + tag uses. Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + This field cannot be modified after creation. + template_display_name (str): + Output only. The display name of the tag + template. + column (str): + Resources like Entry can have schemas associated with them. + This scope allows users to attach tags to an individual + column based on that schema. + + For attaching a tag to a nested column, use ``.`` to + separate the column names. Example: + + - ``outer_column.inner_column`` + + This field is a member of `oneof`_ ``scope``. + fields (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.TagField]): + Required. This maps the ID of a tag field to + the value of and additional information about + that field. Valid field IDs are defined by the + tag's template. A tag must have at least 1 field + and at most 500 fields. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + template: str = proto.Field( + proto.STRING, + number=2, + ) + template_display_name: str = proto.Field( + proto.STRING, + number=5, + ) + column: str = proto.Field( + proto.STRING, + number=4, + oneof='scope', + ) + fields: MutableMapping[str, 'TagField'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message='TagField', + ) + + +class TagField(proto.Message): + r"""Contains the value and supporting information for a field within a + [Tag][google.cloud.datacatalog.v1beta1.Tag]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + Output only. The display name of this field. + double_value (float): + Holds the value for a tag field with double + type. + + This field is a member of `oneof`_ ``kind``. + string_value (str): + Holds the value for a tag field with string + type. + + This field is a member of `oneof`_ ``kind``. + bool_value (bool): + Holds the value for a tag field with boolean + type. + + This field is a member of `oneof`_ ``kind``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + Holds the value for a tag field with + timestamp type. 
+ + This field is a member of `oneof`_ ``kind``. + enum_value (google.cloud.datacatalog_v1beta1.types.TagField.EnumValue): + Holds the value for a tag field with enum + type. This value must be one of the allowed + values in the definition of this enum. + + This field is a member of `oneof`_ ``kind``. + order (int): + Output only. The order of this field with respect to other + fields in this tag. It can be set in + [Tag][google.cloud.datacatalog.v1beta1.TagTemplateField.order]. + For example, a higher value can indicate a more important + field. The value can be negative. Multiple fields can have + the same order, and field orders within a tag do not have to + be sequential. + """ + + class EnumValue(proto.Message): + r"""Holds an enum value. + + Attributes: + display_name (str): + The display name of the enum value. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + double_value: float = proto.Field( + proto.DOUBLE, + number=2, + oneof='kind', + ) + string_value: str = proto.Field( + proto.STRING, + number=3, + oneof='kind', + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=4, + oneof='kind', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + oneof='kind', + message=timestamp_pb2.Timestamp, + ) + enum_value: EnumValue = proto.Field( + proto.MESSAGE, + number=6, + oneof='kind', + message=EnumValue, + ) + order: int = proto.Field( + proto.INT32, + number=7, + ) + + +class TagTemplate(proto.Message): + r"""A tag template defines a tag, which can have one or more typed + fields. The template is used to create and attach the tag to Google + Cloud resources. `Tag template + roles `__ + provide permissions to create, edit, and use the template. See, for + example, the `TagTemplate + User `__ + role, which includes permission to use the tag template to tag + resources. + + Attributes: + name (str): + The resource name of the tag template in URL format. + Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + + Note that this TagTemplate and its child resources may not + actually be stored in the location in this name. + display_name (str): + The display name for this template. Defaults + to an empty string. + fields (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.TagTemplateField]): + Required. Map of tag template field IDs to the settings for + the field. This map is an exhaustive list of the allowed + fields. This map must contain at least one field and at most + 500 fields. + + The keys to this map are tag template field IDs. Field IDs + can contain letters (both uppercase and lowercase), numbers + (0-9) and underscores (_). Field IDs must be at least 1 + character long and at most 64 characters long. Field IDs + must start with a letter or underscore. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + fields: MutableMapping[str, 'TagTemplateField'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message='TagTemplateField', + ) + + +class TagTemplateField(proto.Message): + r"""The template for an individual field within a tag template. + + Attributes: + name (str): + Output only. The resource name of the tag template field in + URL format. 
Example: + + - projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + + Note that this TagTemplateField may not actually be stored + in the location in this name. + display_name (str): + The display name for this field. Defaults to + an empty string. + type_ (google.cloud.datacatalog_v1beta1.types.FieldType): + Required. The type of value this tag field + can contain. + is_required (bool): + Whether this is a required field. Defaults to + false. + description (str): + The description for this field. Defaults to + an empty string. + order (int): + The order of this field with respect to other + fields in this tag template. A higher value + indicates a more important field. The value can + be negative. Multiple fields can have the same + order, and field orders within a tag do not have + to be sequential. + """ + + name: str = proto.Field( + proto.STRING, + number=6, + ) + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'FieldType' = proto.Field( + proto.MESSAGE, + number=2, + message='FieldType', + ) + is_required: bool = proto.Field( + proto.BOOL, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + order: int = proto.Field( + proto.INT32, + number=5, + ) + + +class FieldType(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + primitive_type (google.cloud.datacatalog_v1beta1.types.FieldType.PrimitiveType): + Represents primitive types - string, bool + etc. + + This field is a member of `oneof`_ ``type_decl``. + enum_type (google.cloud.datacatalog_v1beta1.types.FieldType.EnumType): + Represents an enum type. + + This field is a member of `oneof`_ ``type_decl``. + """ + class PrimitiveType(proto.Enum): + r""" + + Values: + PRIMITIVE_TYPE_UNSPECIFIED (0): + This is the default invalid value for a type. + DOUBLE (1): + A double precision number. + STRING (2): + An UTF-8 string. + BOOL (3): + A boolean value. + TIMESTAMP (4): + A timestamp. + """ + PRIMITIVE_TYPE_UNSPECIFIED = 0 + DOUBLE = 1 + STRING = 2 + BOOL = 3 + TIMESTAMP = 4 + + class EnumType(proto.Message): + r""" + + Attributes: + allowed_values (MutableSequence[google.cloud.datacatalog_v1beta1.types.FieldType.EnumType.EnumValue]): + + """ + + class EnumValue(proto.Message): + r""" + + Attributes: + display_name (str): + Required. The display name of the enum value. + Must not be an empty string. 
+            """

+
+            display_name: str = proto.Field(
+                proto.STRING,
+                number=1,
+            )
+
+        allowed_values: MutableSequence['FieldType.EnumType.EnumValue'] = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='FieldType.EnumType.EnumValue',
+        )
+
+    primitive_type: PrimitiveType = proto.Field(
+        proto.ENUM,
+        number=1,
+        oneof='type_decl',
+        enum=PrimitiveType,
+    )
+    enum_type: EnumType = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='type_decl',
+        message=EnumType,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py
new file mode 100644
index 000000000000..ec7f95ba634e
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py
@@ -0,0 +1,67 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations

+
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.cloud.datacatalog.v1beta1',
+    manifest={
+        'SystemTimestamps',
+    },
+)
+
+
+class SystemTimestamps(proto.Message):
+    r"""Timestamps about this resource according to a particular
+    system.
+
+    Attributes:
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            The creation time of the resource within the
+            given system.
+        update_time (google.protobuf.timestamp_pb2.Timestamp):
+            The last-modified time of the resource within
+            the given system.
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            Output only. The expiration time of the
+            resource within the given system. Currently only
+            applicable to BigQuery resources.
+    """

+
+    create_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=timestamp_pb2.Timestamp,
+    )
+    update_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    expire_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py
new file mode 100644
index 000000000000..02d3fa0c92e7
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
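+#
+# Example (illustrative sketch, not generated code): the tag types in
+# types/tags.py above are proto-plus messages; map fields such as ``fields``
+# accept plain dicts. All resource names and IDs below are placeholders.
+#
+#     from google.cloud.datacatalog_v1beta1 import types
+#
+#     template = types.TagTemplate(
+#         display_name="Data governance",
+#         fields={
+#             "owner": types.TagTemplateField(
+#                 display_name="Owner",
+#                 type_=types.FieldType(
+#                     primitive_type=types.FieldType.PrimitiveType.STRING,
+#                 ),
+#                 is_required=True,
+#             ),
+#         },
+#     )
+#     tag = types.Tag(
+#         template="projects/my-project/locations/us-central1/tagTemplates/governance",
+#         fields={"owner": types.TagField(string_value="analytics-team")},
+#     )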
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.datacatalog.v1beta1', + manifest={ + 'UsageStats', + 'UsageSignal', + }, +) + + +class UsageStats(proto.Message): + r"""Detailed counts on the entry's usage. + Caveats: + + - Only BigQuery tables have usage stats + - The usage stats only include BigQuery query jobs + - The usage stats might be underestimated, e.g. wildcard table + references are not yet counted in usage computation + https://cloud.google.com/bigquery/docs/querying-wildcard-tables + + Attributes: + total_completions (float): + The number of times that the underlying entry + was successfully used. + total_failures (float): + The number of times that the underlying entry + was attempted to be used but failed. + total_cancellations (float): + The number of times that the underlying entry + was attempted to be used but was cancelled by + the user. + total_execution_time_for_completions_millis (float): + Total time spent (in milliseconds) during + uses that resulted in completions. + """ + + total_completions: float = proto.Field( + proto.FLOAT, + number=1, + ) + total_failures: float = proto.Field( + proto.FLOAT, + number=2, + ) + total_cancellations: float = proto.Field( + proto.FLOAT, + number=3, + ) + total_execution_time_for_completions_millis: float = proto.Field( + proto.FLOAT, + number=4, + ) + + +class UsageSignal(proto.Message): + r"""The set of all usage signals that we store in Data Catalog. + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the end of the usage + statistics duration. + usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.UsageStats]): + Usage statistics over each of the pre-defined + time ranges; supported strings for time ranges + are {"24H", "7D", "30D"}. + """ + + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + usage_within_time_range: MutableMapping[str, 'UsageStats'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=2, + message='UsageStats', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini new file mode 100644 index 000000000000..574c5aed394b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py new file mode 100644 index 000000000000..c553519e1586 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds", + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/datacatalog_v1beta1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py new file mode 100644 index 000000000000..2a4139403363 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = await client.create_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py new file mode 100644 index 000000000000..8e52263e6986 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = await client.create_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py new file mode 100644 index 000000000000..0fb557a1b7a4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateEntryGroupRequest( + parent="parent_value", + entry_group_id="entry_group_id_value", + ) + + # Make the request + response = client.create_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py new file mode 100644 index 000000000000..b114478ce776 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.CreateEntryRequest( + parent="parent_value", + entry_id="entry_id_value", + entry=entry, + ) + + # Make the request + response = client.create_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py new file mode 100644 index 000000000000..ec5863396bc1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = await client.create_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py new file mode 100644 index 000000000000..f8821f5cf699 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.CreateTagRequest( + parent="parent_value", + tag=tag, + ) + + # Make the request + response = client.create_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py new file mode 100644 index 000000000000..ae5841be4d02 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = await client.create_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py new file mode 100644 index 000000000000..3deeeb4b04b7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.create_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py new file mode 100644 index 000000000000..ed7203812ca1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.CreateTagTemplateFieldRequest( + parent="parent_value", + tag_template_field_id="tag_template_field_id_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.create_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py new file mode 100644 index 000000000000..0b1feb38d688 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTagTemplateRequest( + parent="parent_value", + tag_template_id="tag_template_id_value", + ) + + # Make the request + response = client.create_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py new file mode 100644 index 000000000000..93f72a4d05b0 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py new file mode 100644 index 000000000000..100f8f206e37 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_entry_group(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py new file mode 100644 index 000000000000..b4e7093432a9 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_entry_group(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py new file mode 100644 index 000000000000..03f85f447eb1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteEntryRequest( + name="name_value", + ) + + # Make the request + client.delete_entry(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py new file mode 100644 index 000000000000..f8e048c17be4 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_tag(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py new file mode 100644 index 000000000000..80674f7f2eed --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagRequest( + name="name_value", + ) + + # Make the request + client.delete_tag(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py new file mode 100644 index 000000000000..9201de2cd1e6 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py new file mode 100644 index 000000000000..1229caa7a4ab --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + await client.delete_tag_template_field(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py new file mode 100644 index 000000000000..dd8c0e840d8b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template_field(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py new file mode 100644 index 000000000000..330e9bed8b36 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTagTemplateRequest( + name="name_value", + force=True, + ) + + # Make the request + client.delete_tag_template(request=request) + + +# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py new file mode 100644 index 000000000000..a82d9be4ace5 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_get_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py new file mode 100644 index 000000000000..56282040696a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_get_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py new file mode 100644 index 000000000000..2da6dca76f65 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_get_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py new file mode 100644 index 000000000000..2944e6e6f9b1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_get_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetEntryRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py new file mode 100644 index 000000000000..e1c18a42ce5a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py new file mode 100644 index 000000000000..b3d71872a427 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py new file mode 100644 index 000000000000..8a857df051ec --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_get_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py new file mode 100644 index 000000000000..37d7a439c87c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_get_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTagTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py new file mode 100644 index 000000000000..a7c314f69bf6 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_ListEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_list_entries():
+    # Create a client
+    client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.ListEntriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.list_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_DataCatalog_ListEntries_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py
new file mode 100644
index 000000000000..a28b3ca13b33
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_list_entries(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py new file mode 100644 index 000000000000..c8fefd5d8830 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEntryGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_list_entry_groups():
+    # Create a client
+    client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.ListEntryGroupsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.list_entry_groups(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py
new file mode 100644
index 000000000000..19afe7a10caf
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEntryGroups
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_list_entry_groups(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListEntryGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_entry_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py new file mode 100644 index 000000000000..f2dac6f015e2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_ListTags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_list_tags():
+    # Create a client
+    client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.ListTagsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.list_tags(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_DataCatalog_ListTags_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py
new file mode 100644
index 000000000000..daf3a9a17e71
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_DataCatalog_ListTags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_list_tags(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_ListTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py new file mode 100644 index 000000000000..83248ad5b528 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_lookup_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = await client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py new file mode 100644 index 000000000000..6094d0b5c00b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LookupEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_lookup_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.LookupEntryRequest( + linked_resource="linked_resource_value", + ) + + # Make the request + response = client.lookup_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py new file mode 100644 index 000000000000..33ed2dbfc39e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = await client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py new file mode 100644 index 000000000000..5452195f0b12 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateFieldEnumValue +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = await client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py new file mode 100644 index 000000000000..8cca3156d261 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateFieldEnumValue +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_rename_tag_template_field_enum_value(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( + name="name_value", + new_enum_value_display_name="new_enum_value_display_name_value", + ) + + # Make the request + response = client.rename_tag_template_field_enum_value(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py new file mode 100644 index 000000000000..1d6487816a20 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RenameTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_rename_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( + name="name_value", + new_tag_template_field_id="new_tag_template_field_id_value", + ) + + # Make the request + response = client.rename_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py new file mode 100644 index 000000000000..85cbeea3781b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchCatalog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_search_catalog():
+    # Create a client
+    client = datacatalog_v1beta1.DataCatalogAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.SearchCatalogRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.search_catalog(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py
new file mode 100644
index 000000000000..a9ddb9429768
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SearchCatalog
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_search_catalog(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.SearchCatalogRequest( + ) + + # Make the request + page_result = client.search_catalog(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py new file mode 100644 index 000000000000..dc05b9c411fd --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py new file mode 100644 index 000000000000..400a3b82736c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py new file mode 100644 index 000000000000..f654277fe7fa --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py new file mode 100644 index 000000000000..2f7dc90069ce --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py new file mode 100644 index 000000000000..0c57c832761c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = await client.update_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py new file mode 100644 index 000000000000..804fffb64e4e --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateEntryGroupRequest( + ) + + # Make the request + response = await client.update_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py new file mode 100644 index 000000000000..5f58410c2a82 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntryGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_entry_group(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateEntryGroupRequest( + ) + + # Make the request + response = client.update_entry_group(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py new file mode 100644 index 000000000000..45e5e55b4313 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntry +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_entry(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + entry = datacatalog_v1beta1.Entry() + entry.type_ = "FILESET" + entry.integrated_system = "CLOUD_PUBSUB" + entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] + + request = datacatalog_v1beta1.UpdateEntryRequest( + entry=entry, + ) + + # Make the request + response = client.update_entry(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py new file mode 100644 index 000000000000..4c1dde679c99 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = await client.update_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py new file mode 100644 index 000000000000..b432ab7b927f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_tag(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag = datacatalog_v1beta1.Tag() + tag.column = "column_value" + tag.template = "template_value" + + request = datacatalog_v1beta1.UpdateTagRequest( + tag=tag, + ) + + # Make the request + response = client.update_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py new file mode 100644 index 000000000000..8a5e268974cb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTagTemplateRequest( + ) + + # Make the request + response = await client.update_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py new file mode 100644 index 000000000000..62489bdf4b66 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogAsyncClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = await client.update_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py new file mode 100644 index 000000000000..7f2c8236d3e2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplateField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_tag_template_field(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + tag_template_field = datacatalog_v1beta1.TagTemplateField() + tag_template_field.type_.primitive_type = "TIMESTAMP" + + request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( + name="name_value", + tag_template_field=tag_template_field, + ) + + # Make the request + response = client.update_tag_template_field(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py new file mode 100644 index 000000000000..0874945ed578 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTagTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_tag_template(): + # Create a client + client = datacatalog_v1beta1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTagTemplateRequest( + ) + + # Make the request + response = client.update_tag_template(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py new file mode 100644 index 000000000000..a81b726d9e9a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py new file mode 100644 index 000000000000..71a62addce00 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreatePolicyTagRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py new file mode 100644 index 000000000000..27efa9b68ab7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py new file mode 100644 index 000000000000..1290fd16f3b3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_create_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.CreateTaxonomyRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py new file mode 100644 index 000000000000..3c5a5696eee1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + await client.delete_policy_tag(request=request) + + +# [END datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py new file mode 100644 index 000000000000..4041a12fc129 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeletePolicyTagRequest( + name="name_value", + ) + + # Make the request + client.delete_policy_tag(request=request) + + +# [END datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py new file mode 100644 index 000000000000..e5b78b19a424 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + await client.delete_taxonomy(request=request) + + +# [END datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py new file mode 100644 index 000000000000..9885d9ca0591 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_delete_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.DeleteTaxonomyRequest( + name="name_value", + ) + + # Make the request + client.delete_taxonomy(request=request) + + +# [END datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py new file mode 100644 index 000000000000..9641a9c267b2 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py new file mode 100644 index 000000000000..9f3007cbc08f --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py new file mode 100644 index 000000000000..2cb680ced5f8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = await client.get_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py new file mode 100644 index 000000000000..7b0434b6b748 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_get_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetPolicyTagRequest( + name="name_value", + ) + + # Make the request + response = client.get_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py new file mode 100644 index 000000000000..3a98f1d61b29 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py new file mode 100644 index 000000000000..a2a1fd2f41c3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_get_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.GetTaxonomyRequest( + name="name_value", + ) + + # Make the request + response = client.get_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py new file mode 100644 index 000000000000..3973328cb7ea --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPolicyTags +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_list_policy_tags():
+    # Create a client
+    client = datacatalog_v1beta1.PolicyTagManagerAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.ListPolicyTagsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client call must be awaited to obtain the pager)
+    page_result = await client.list_policy_tags(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py
new file mode 100644
index 000000000000..925b44bb886b
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListPolicyTags
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_list_policy_tags(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListPolicyTagsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_policy_tags(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py new file mode 100644 index 000000000000..44744709b9e1 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_list_taxonomies():
+    # Create a client
+    client = datacatalog_v1beta1.PolicyTagManagerAsyncClient()
+
+    # Initialize request argument(s)
+    request = datacatalog_v1beta1.ListTaxonomiesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client call must be awaited to obtain the pager)
+    page_result = await client.list_taxonomies(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py
new file mode 100644
index 000000000000..5dd8a071a4bb
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTaxonomies
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_list_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ListTaxonomiesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_taxonomies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py new file mode 100644 index 000000000000..a8f5623b7c2c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = await client.export_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py new file mode 100644 index 000000000000..f609cb7fa09c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_export_taxonomies(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.ExportTaxonomiesRequest( + serialized_taxonomies=True, + parent="parent_value", + taxonomies=['taxonomies_value1', 'taxonomies_value2'], + ) + + # Make the request + response = client.export_taxonomies(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py new file mode 100644 index 000000000000..b22aefd13e34 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportTaxonomies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+async def sample_import_taxonomies():
+    # Create a client
+    client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient()
+
+    # Initialize request argument(s)
+    inline_source = datacatalog_v1beta1.InlineSource()
+    inline_source.taxonomies.append(datacatalog_v1beta1.SerializedTaxonomy(display_name="display_name_value"))  # taxonomies is a repeated field
+
+    request = datacatalog_v1beta1.ImportTaxonomiesRequest(
+        inline_source=inline_source,
+        parent="parent_value",
+    )
+
+    # Make the request
+    response = await client.import_taxonomies(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py
new file mode 100644
index 000000000000..78e7a29f3cdd
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ImportTaxonomies
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import datacatalog_v1beta1
+
+
+def sample_import_taxonomies():
+    # Create a client
+    client = datacatalog_v1beta1.PolicyTagManagerSerializationClient()
+
+    # Initialize request argument(s)
+    inline_source = datacatalog_v1beta1.InlineSource()
+    inline_source.taxonomies.append(datacatalog_v1beta1.SerializedTaxonomy(display_name="display_name_value"))  # taxonomies is a repeated field
+
+    request = datacatalog_v1beta1.ImportTaxonomiesRequest(
+        inline_source=inline_source,
+        parent="parent_value",
+    )
+
+    # Make the request
+    response = client.import_taxonomies(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync]
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py
new file mode 100644
index 000000000000..e1205653eb19
--- /dev/null
+++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for SetIamPolicy
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-datacatalog
+
+
+# [START datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py new file mode 100644 index 000000000000..e053df5959a8 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py new file mode 100644 index 000000000000..84a92fa19f8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py new file mode 100644 index 000000000000..6ee09e00ad9b --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py new file mode 100644 index 000000000000..039ffcce1172 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdatePolicyTagRequest( + ) + + # Make the request + response = await client.update_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py new file mode 100644 index 000000000000..88a1346aaf1a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePolicyTag +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_policy_tag(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdatePolicyTagRequest( + ) + + # Make the request + response = client.update_policy_tag(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py new file mode 100644 index 000000000000..f027cd290376 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +async def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTaxonomyRequest( + ) + + # Make the request + response = await client.update_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py new file mode 100644 index 000000000000..514f310b730c --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTaxonomy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1beta1 + + +def sample_update_taxonomy(): + # Create a client + client = datacatalog_v1beta1.PolicyTagManagerClient() + + # Initialize request argument(s) + request = datacatalog_v1beta1.UpdateTaxonomyRequest( + ) + + # Make the request + response = client.update_taxonomy(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json new file mode 100644 index 000000000000..14e0e75feb01 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -0,0 +1,7024 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.datacatalog.v1beta1", + "version": "v1beta1" + } + ], + "language": "PYTHON", + "name": "google-cloud-datacatalog", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_entry_group", + "method": { + "fullName": 
"google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "create_entry_group" + }, + "description": "Sample for CreateEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1beta1.types.Entry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", 
+ "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_id", + "type": "str" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1beta1.types.Entry" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "create_entry" + }, + "description": "Sample for CreateEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_field_id", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "create_tag_template_field" + }, + "description": "Sample for CreateTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_field_id", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "create_tag_template_field" + }, + "description": "Sample for CreateTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_id", + "type": "str" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "create_tag_template" + }, + "description": "Sample for CreateTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async", + "segments": [ + { + "end": 52, + "start": 27, 
+ "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag_template_id", + "type": "str" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "create_tag_template" + }, + "description": "Sample for CreateTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1beta1.types.Tag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", + "shortName": "create_tag" + }, + "description": "Sample for CreateTag", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datacatalog_v1beta1_generated_DataCatalog_CreateTag_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "CreateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1beta1.types.Tag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", + "shortName": "create_tag" + }, + "description": "Sample for CreateTag", + "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 
49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry_group" + }, + "description": "Sample for DeleteEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_entry" + }, + "description": "Sample for DeleteEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template_field" + }, + "description": "Sample for DeleteTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": 
"DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template_field" + }, + "description": "Sample for DeleteTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template" + }, + "description": "Sample for DeleteTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag_template", 
+ "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag_template" + }, + "description": "Sample for DeleteTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag" + }, + "description": "Sample for DeleteTag", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "DeleteTag" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.datacatalog_v1beta1.types.DeleteTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_tag" + }, + "description": "Sample for DeleteTag", + "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "read_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "read_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "get_entry_group" + }, + "description": "Sample for GetEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetEntryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "get_entry" + }, + "description": "Sample for GetEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": 
"datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "get_tag_template" + }, + "description": "Sample for GetTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "GetTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "get_tag_template" + }, + "description": "Sample for GetTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesAsyncPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "datacatalog_v1beta1_generated_data_catalog_list_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_entries", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListEntriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesPager", + "shortName": "list_entries" + }, + "description": "Sample for ListEntries", + "file": "datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_entry_groups", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsAsyncPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_entry_groups", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListEntryGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsPager", + "shortName": "list_entry_groups" + }, + "description": "Sample for ListEntryGroups", + "file": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync", + "segments": [ + { + 
"end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsAsyncPager", + "shortName": "list_tags" + }, + "description": "Sample for ListTags", + "file": "datacatalog_v1beta1_generated_data_catalog_list_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListTags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "ListTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsPager", + "shortName": "list_tags" + }, + "description": "Sample for ListTags", + "file": "datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListTags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.lookup_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.lookup_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "LookupEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.LookupEntryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "lookup_entry" + }, + "description": "Sample for LookupEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.rename_tag_template_field_enum_value", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateFieldEnumValue", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateFieldEnumValue" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_enum_value_display_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "rename_tag_template_field_enum_value" + }, + "description": "Sample for RenameTagTemplateFieldEnumValue", + "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.rename_tag_template_field_enum_value", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateFieldEnumValue", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateFieldEnumValue" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_enum_value_display_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "rename_tag_template_field_enum_value" + }, + "description": "Sample for RenameTagTemplateFieldEnumValue", + "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + 
"end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.rename_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_tag_template_field_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "rename_tag_template_field" + }, + "description": "Sample for RenameTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.rename_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RenameTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "new_tag_template_field_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "rename_tag_template_field" + }, + "description": "Sample for RenameTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.search_catalog", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SearchCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest" + }, + { + "name": "scope", + "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogAsyncPager", + "shortName": "search_catalog" + }, + "description": "Sample for SearchCatalog", + "file": "datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.search_catalog", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SearchCatalog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest" + }, + { + "name": "scope", + "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogPager", + "shortName": 
"search_catalog" + }, + "description": "Sample for SearchCatalog", + "file": "datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 
27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_entry_group", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest" + }, + { + "name": "entry_group", + "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", + "shortName": "update_entry_group" + }, + "description": "Sample for UpdateEntryGroup", + "file": 
"datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1beta1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_entry", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateEntry" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest" + }, + { + "name": "entry", + "type": "google.cloud.datacatalog_v1beta1.types.Entry" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.datacatalog_v1beta1.types.Entry", + "shortName": "update_entry" + }, + "description": "Sample for UpdateEntry", + "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "tag_template_field", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "update_tag_template_field" + }, + "description": "Sample for UpdateTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag_template_field", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplateField" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": 
"tag_template_field", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", + "shortName": "update_tag_template_field" + }, + "description": "Sample for UpdateTagTemplateField", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "update_tag_template" + }, + "description": "Sample for UpdateTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag_template", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", + "service": { + 
"fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTagTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest" + }, + { + "name": "tag_template", + "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", + "shortName": "update_tag_template" + }, + "description": "Sample for UpdateTagTemplate", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagRequest" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1beta1.types.Tag" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", + "shortName": "update_tag" + }, + "description": "Sample for UpdateTag", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": 
"google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "UpdateTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagRequest" + }, + { + "name": "tag", + "type": "google.cloud.datacatalog_v1beta1.types.Tag" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", + "shortName": "update_tag" + }, + "description": "Sample for UpdateTag", + "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient", + "shortName": "PolicyTagManagerSerializationAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient.export_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ExportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse", + "shortName": "export_taxonomies" + }, + "description": "Sample for ExportTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient", + "shortName": "PolicyTagManagerSerializationClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient.export_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ExportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse", + "shortName": "export_taxonomies" + }, + "description": "Sample for ExportTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient", + "shortName": "PolicyTagManagerSerializationAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient.import_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ImportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse", + "shortName": "import_taxonomies" + }, + "description": "Sample for ImportTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient", + "shortName": "PolicyTagManagerSerializationClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient.import_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", + "shortName": "PolicyTagManagerSerialization" + }, + "shortName": "ImportTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse", + "shortName": "import_taxonomies" + }, + "description": "Sample for ImportTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.create_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "create_policy_tag" + }, + "description": "Sample for CreatePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + 
}, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.create_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "create_policy_tag" + }, + "description": "Sample for CreatePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.create_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "create_taxonomy" + }, + "description": "Sample for CreateTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async", + "segments": [ + 
{ + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.create_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "CreateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "create_taxonomy" + }, + "description": "Sample for CreateTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.delete_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeletePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_policy_tag" + }, + "description": "Sample for DeletePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.delete_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeletePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_policy_tag" + }, + "description": "Sample for DeletePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.delete_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeleteTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_taxonomy" + }, + "description": "Sample for DeleteTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.delete_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "DeleteTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_taxonomy" + }, + "description": "Sample for DeleteTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetPolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "get_policy_tag" + }, + "description": "Sample for GetPolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { 
+ "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetPolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "get_policy_tag" + }, + "description": "Sample for GetPolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "get_taxonomy" + }, + "description": "Sample for GetTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "GetTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "get_taxonomy" + }, + "description": "Sample for GetTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.list_policy_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListPolicyTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager", + "shortName": "list_policy_tags" + }, + "description": "Sample for ListPolicyTags", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.list_policy_tags", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListPolicyTags" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsPager", + "shortName": "list_policy_tags" + }, + "description": "Sample for ListPolicyTags", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.list_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager", + "shortName": "list_taxonomies" + }, + "description": "Sample for ListTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.list_taxonomies", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "ListTaxonomies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesPager", + "shortName": "list_taxonomies" + }, + "description": "Sample for ListTaxonomies", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.set_iam_policy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.SetIamPolicy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.TestIamPermissions", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.update_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "update_policy_tag" + }, + "description": "Sample for UpdatePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": 
"PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.update_policy_tag", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdatePolicyTag" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest" + }, + { + "name": "policy_tag", + "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", + "shortName": "update_policy_tag" + }, + "description": "Sample for UpdatePolicyTag", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", + "shortName": "PolicyTagManagerAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.update_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "update_taxonomy" + }, + "description": "Sample for UpdateTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", + "shortName": "PolicyTagManagerClient" + }, + "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.update_taxonomy", + "method": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy", + "service": { + "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", + "shortName": "PolicyTagManager" + }, + "shortName": "UpdateTaxonomy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest" + }, + { + "name": "taxonomy", + "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", + "shortName": "update_taxonomy" + }, + "description": "Sample for UpdateTaxonomy", + "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py" + } + ] +} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py new file mode 100644 index 000000000000..a490f699057a --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py @@ -0,0 +1,215 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datacatalogCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_entry': ('parent', 'entry_id', 'entry', ), + 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), + 'create_policy_tag': ('parent', 'policy_tag', ), + 'create_tag': ('parent', 'tag', ), + 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), + 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), + 'create_taxonomy': ('parent', 'taxonomy', ), + 'delete_entry': ('name', ), + 'delete_entry_group': ('name', 'force', ), + 'delete_policy_tag': ('name', ), + 'delete_tag': ('name', ), + 'delete_tag_template': ('name', 'force', ), + 'delete_tag_template_field': ('name', 'force', ), + 'delete_taxonomy': ('name', ), + 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), + 'get_entry': ('name', ), + 'get_entry_group': ('name', 'read_mask', ), + 'get_iam_policy': ('resource', 'options', ), + 'get_policy_tag': ('name', ), + 'get_tag_template': ('name', ), + 'get_taxonomy': ('name', ), + 'import_taxonomies': ('parent', 'inline_source', ), + 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), + 'list_entry_groups': ('parent', 'page_size', 'page_token', ), + 'list_policy_tags': ('parent', 'page_size', 'page_token', ), + 'list_tags': ('parent', 'page_size', 'page_token', ), + 'list_taxonomies': ('parent', 'page_size', 'page_token', 'filter', ), + 'lookup_entry': ('linked_resource', 'sql_resource', ), + 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), + 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), + 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), + 'test_iam_permissions': ('resource', 'permissions', ), + 'update_entry': ('entry', 'update_mask', ), + 'update_entry_group': ('entry_group', 'update_mask', ), + 'update_policy_tag': ('policy_tag', 'update_mask', ), + 'update_tag': ('tag', 'update_mask', ), + 'update_tag_template': ('tag_template', 'update_mask', ), + 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), + 'update_taxonomy': ('taxonomy', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datacatalogCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datacatalog client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py new file mode 100644 index 000000000000..1d59854d3bcb --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-datacatalog' + + +description = "Google Cloud Datacatalog API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/datacatalog/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", +] +url = "https://github.com/googleapis/python-datacatalog" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google", "google.cloud"] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt new file mode 100644 index 000000000000..2beecf99e0be --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py new file mode 100644 index 000000000000..1b4db446eb8d --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py new file mode 100644 index 000000000000..5da43c8578c3 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -0,0 +1,8709 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogAsyncClient +from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogClient +from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers +from google.cloud.datacatalog_v1beta1.services.data_catalog import transports +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import datacatalog +from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec +from google.cloud.datacatalog_v1beta1.types import schema +from google.cloud.datacatalog_v1beta1.types import search +from google.cloud.datacatalog_v1beta1.types import table_spec +from google.cloud.datacatalog_v1beta1.types import tags +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.cloud.datacatalog_v1beta1.types import usage +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataCatalogClient._get_default_mtls_endpoint(None) is None + assert DataCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DataCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataCatalogClient, "grpc"), + (DataCatalogAsyncClient, "grpc_asyncio"), +]) +def test_data_catalog_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DataCatalogGrpcTransport, "grpc"), + (transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_catalog_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (DataCatalogClient, "grpc"), + (DataCatalogAsyncClient, "grpc_asyncio"), +]) +def test_data_catalog_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +def test_data_catalog_client_get_transport_class(): + transport = DataCatalogClient.get_transport_class() + available_transports = [ + transports.DataCatalogGrpcTransport, + ] + assert transport in 
available_transports + + transport = DataCatalogClient.get_transport_class("grpc") + assert transport == transports.DataCatalogGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +def test_data_catalog_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_catalog_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + DataCatalogClient, DataCatalogAsyncClient +]) +@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) +@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) +def test_data_catalog_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_data_catalog_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_catalog_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
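These option-plumbing tests mirror what callers can set through client_options.ClientOptions. A rough configuration sketch, with placeholder project ID, key-file path, and endpoint, shown against the v1 client:

from google.api_core import client_options as client_options_lib
from google.cloud import datacatalog_v1

# All values below are placeholders; the key file must exist for the client to build.
options = client_options_lib.ClientOptions(
    api_endpoint="datacatalog.googleapis.com",  # override for regional/private endpoints
    quota_project_id="my-quota-project",        # project charged for quota
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
    credentials_file="credentials.json",        # service-account key (hypothetical path)
)
client = datacatalog_v1.DataCatalogClient(client_options=options)

Each of these keywords is forwarded to the transport constructor, which is what the patched assertions around this point verify.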
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_data_catalog_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DataCatalogClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_data_catalog_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.SearchCatalogRequest, + dict, +]) +def test_search_catalog(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.search_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_search_catalog_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + client.search_catalog() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + +@pytest.mark.asyncio +async def test_search_catalog_async(transport: str = 'grpc_asyncio', request_type=datacatalog.SearchCatalogRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. 
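The SearchCatalog tests here run against a mocked transport; for orientation, an un-mocked call looks roughly like this, assuming Application Default Credentials and a placeholder project ID:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()

# Scope the search to one or more projects/organizations (placeholder ID).
scope = datacatalog_v1.SearchCatalogRequest.Scope(include_project_ids=["my-project"])

# The flattened form builds the SearchCatalogRequest for you and returns a pager.
for result in client.search_catalog(scope=scope, query="system=bigquery"):
    print(result.relative_resource_name)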
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse( + total_size=1086, + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.search_catalog(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SearchCatalogRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchCatalogAsyncPager) + assert response.total_size == 1086 + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_search_catalog_async_from_dict(): + await test_search_catalog_async(request_type=dict) + + +def test_search_catalog_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_catalog( + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + + +def test_search_catalog_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_catalog( + datacatalog.SearchCatalogRequest(), + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + +@pytest.mark.asyncio +async def test_search_catalog_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.SearchCatalogResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_catalog( + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_search_catalog_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_catalog( + datacatalog.SearchCatalogRequest(), + scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), + query='query_value', + ) + + +def test_search_catalog_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.search_catalog(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, search.SearchCatalogResult) + for i in results) +def test_search_catalog_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_catalog(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_catalog_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
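The pager tests stitch fake pages together to prove the token plumbing; in real use the pager resolves next_page_token transparently. A sketch of the three iteration styles being exercised here (items, raw pages, and the async pager), with placeholder values:

import asyncio

from google.cloud import datacatalog_v1

scope = datacatalog_v1.SearchCatalogRequest.Scope(include_project_ids=["my-project"])
query = "system=bigquery"

client = datacatalog_v1.DataCatalogClient()

# Item iteration: the pager fetches additional pages on demand.
results = list(client.search_catalog(scope=scope, query=query))

# Page iteration: each page is a raw SearchCatalogResponse.
for page in client.search_catalog(scope=scope, query=query).pages:
    print(page.next_page_token)


async def search_async():
    async_client = datacatalog_v1.DataCatalogAsyncClient()
    async_pager = await async_client.search_catalog(scope=scope, query=query)
    async for result in async_pager:  # yields SearchCatalogResult items
        print(result.relative_resource_name)


asyncio.run(search_async())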
+ call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_catalog(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, search.SearchCatalogResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_search_catalog_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_catalog), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + next_page_token='abc', + ), + datacatalog.SearchCatalogResponse( + results=[], + next_page_token='def', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + ], + next_page_token='ghi', + ), + datacatalog.SearchCatalogResponse( + results=[ + search.SearchCatalogResult(), + search.SearchCatalogResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_catalog(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateEntryGroupRequest, + dict, +]) +def test_create_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + + # Establish that the response is the type that we expect. 
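For context on the call these CreateEntryGroup tests are mocking, a hedged usage sketch with placeholder project, location, and ID values:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()

parent = datacatalog_v1.DataCatalogClient.common_location_path("my-project", "us-central1")
entry_group = datacatalog_v1.EntryGroup(
    display_name="On-prem inventory",
    description="Entries describing on-prem data sources.",
)

created = client.create_entry_group(
    parent=parent,
    entry_group_id="onprem_inventory",
    entry_group=entry_group,
)
print(created.name)  # projects/.../locations/us-central1/entryGroups/onprem_inventory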
+ assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + client.create_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + +@pytest.mark.asyncio +async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_entry_group_async_from_dict(): + await test_create_entry_group_async(request_type=dict) + + +def test_create_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.CreateEntryGroupRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.create_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry_group( + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_group_id + mock_val = 'entry_group_id_value' + assert arg == mock_val + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + + +def test_create_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry_group( + datacatalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry_group( + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
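The *_flattened_error tests encode a general GAPIC rule: a method takes either a fully built request object or the flattened keyword fields, never both. A short sketch of that failure mode, with placeholder names:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()
request = datacatalog_v1.CreateEntryGroupRequest(
    parent="projects/my-project/locations/us-central1",
    entry_group_id="onprem_inventory",
)

try:
    # Mixing the request object with a flattened field is rejected client-side,
    # before any RPC is issued.
    client.create_entry_group(request, parent="projects/my-project/locations/us-central1")
except ValueError as exc:
    print(exc)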
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_group_id + mock_val = 'entry_group_id_value' + assert arg == mock_val + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_entry_group( + datacatalog.CreateEntryGroupRequest(), + parent='parent_value', + entry_group_id='entry_group_id_value', + entry_group=datacatalog.EntryGroup(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateEntryGroupRequest, + dict, +]) +def test_update_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + client.update_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + +@pytest.mark.asyncio +async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_entry_group_async_from_dict(): + await test_update_entry_group_async(request_type=dict) + + +def test_update_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryGroupRequest() + + request.entry_group.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.update_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry_group.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry_group( + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
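UpdateEntryGroup pairs the modified EntryGroup with a FieldMask so only the named paths are written, which is what the surrounding flattened assertions check. A sketch with placeholder resource components:

from google.cloud import datacatalog_v1
from google.protobuf import field_mask_pb2

client = datacatalog_v1.DataCatalogClient()

entry_group = datacatalog_v1.EntryGroup(
    name=datacatalog_v1.DataCatalogClient.entry_group_path(
        "my-project", "us-central1", "onprem_inventory"
    ),
    description="Refreshed description.",
)
updated = client.update_entry_group(
    entry_group=entry_group,
    update_mask=field_mask_pb2.FieldMask(paths=["description"]),
)
print(updated.description)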
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry_group( + datacatalog.UpdateEntryGroupRequest(), + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_entry_group( + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry_group + mock_val = datacatalog.EntryGroup(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry_group( + datacatalog.UpdateEntryGroupRequest(), + entry_group=datacatalog.EntryGroup(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetEntryGroupRequest, + dict, +]) +def test_get_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + client.get_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + +@pytest.mark.asyncio +async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.EntryGroup) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_entry_group_async_from_dict(): + await test_get_entry_group_async(request_type=dict) + + +def test_get_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = datacatalog.EntryGroup() + client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + await client.get_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entry_group( + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].read_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_get_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry_group( + datacatalog.GetEntryGroupRequest(), + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_get_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.EntryGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry_group( + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
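GetEntryGroup optionally takes a read_mask to trim the returned fields, the second flattened argument asserted in these tests. A placeholder sketch:

from google.cloud import datacatalog_v1
from google.protobuf import field_mask_pb2

client = datacatalog_v1.DataCatalogClient()
name = datacatalog_v1.DataCatalogClient.entry_group_path(
    "my-project", "us-central1", "onprem_inventory"
)

entry_group = client.get_entry_group(
    name=name,
    read_mask=field_mask_pb2.FieldMask(paths=["name", "display_name"]),
)
print(entry_group.display_name)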
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].read_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry_group( + datacatalog.GetEntryGroupRequest(), + name='name_value', + read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteEntryGroupRequest, + dict, +]) +def test_delete_entry_group(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entry_group_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + client.delete_entry_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + +@pytest.mark.asyncio +async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryGroupRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryGroupRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_entry_group_async_from_dict(): + await test_delete_entry_group_async(request_type=dict) + + +def test_delete_entry_group_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = None + client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_group_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryGroupRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entry_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_group_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_group_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_group( + datacatalog.DeleteEntryGroupRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_entry_group_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
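DeleteEntryGroup returns no payload, hence the `assert response is None` checks. A usage sketch with placeholder names; the `force` field is shown only on the assumption that DeleteEntryGroupRequest supports it for removing a group together with its entries:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()
name = datacatalog_v1.DataCatalogClient.entry_group_path(
    "my-project", "us-central1", "onprem_inventory"
)

# Flattened form: only the resource name is needed.
client.delete_entry_group(name=name)

# Request-object form, used when extra fields (assumed here: force) are required.
# client.delete_entry_group(
#     request=datacatalog_v1.DeleteEntryGroupRequest(name=name, force=True)
# )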
+ with mock.patch.object( + type(client.transport.delete_entry_group), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry_group( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_entry_group_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry_group( + datacatalog.DeleteEntryGroupRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListEntryGroupsRequest, + dict, +]) +def test_list_entry_groups(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_entry_groups_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + client.list_entry_groups() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + +@pytest.mark.asyncio +async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntryGroupsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntryGroupsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryGroupsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entry_groups_async_from_dict(): + await test_list_entry_groups_async(request_type=dict) + + +def test_list_entry_groups_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntryGroupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = datacatalog.ListEntryGroupsResponse() + client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entry_groups_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntryGroupsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) + await client.list_entry_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entry_groups_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entry_groups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entry_groups_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entry_groups( + datacatalog.ListEntryGroupsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_entry_groups_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntryGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_entry_groups( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_entry_groups_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_entry_groups( + datacatalog.ListEntryGroupsRequest(), + parent='parent_value', + ) + + +def test_list_entry_groups_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entry_groups(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datacatalog.EntryGroup) + for i in results) +def test_list_entry_groups_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__') as call: + # Set the response to a series of pages. 
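As with SearchCatalog, ListEntryGroups returns a pager that resolves next_page_token behind the scenes; the canned page sequences in these tests only verify that plumbing. A placeholder sketch of the sync and async iteration:

import asyncio

from google.cloud import datacatalog_v1

parent = datacatalog_v1.DataCatalogClient.common_location_path("my-project", "us-central1")

client = datacatalog_v1.DataCatalogClient()
for entry_group in client.list_entry_groups(parent=parent):
    print(entry_group.name)


async def list_async():
    async_client = datacatalog_v1.DataCatalogAsyncClient()
    pager = await async_client.list_entry_groups(parent=parent)
    async for entry_group in pager:
        print(entry_group.name)


asyncio.run(list_async())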
+ call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entry_groups(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entry_groups(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datacatalog.EntryGroup) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entry_groups_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + next_page_token='abc', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token='def', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntryGroupsResponse( + entry_groups=[ + datacatalog.EntryGroup(), + datacatalog.EntryGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entry_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateEntryRequest, + dict, +]) +def test_create_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + client.create_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + +@pytest.mark.asyncio +async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_entry_async_from_dict(): + await test_create_entry_async(request_type=dict) + + +def test_create_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateEntryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.create_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_entry( + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_id + mock_val = 'entry_id_value' + assert arg == mock_val + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + + +def test_create_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry( + datacatalog.CreateEntryRequest(), + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry( + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].entry_id + mock_val = 'entry_id_value' + assert arg == mock_val + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_entry( + datacatalog.CreateEntryRequest(), + parent='parent_value', + entry_id='entry_id_value', + entry=datacatalog.Entry(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateEntryRequest, + dict, +]) +def test_update_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + client.update_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + +@pytest.mark.asyncio +async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_entry_async_from_dict(): + await test_update_entry_async(request_type=dict) + + +def test_update_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryRequest() + + request.entry.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateEntryRequest() + + request.entry.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.update_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'entry.name=name_value', + ) in kw['metadata'] + + +def test_update_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_entry( + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_entry( + datacatalog.UpdateEntryRequest(), + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_entry( + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].entry + mock_val = datacatalog.Entry(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_entry( + datacatalog.UpdateEntryRequest(), + entry=datacatalog.Entry(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteEntryRequest, + dict, +]) +def test_delete_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + client.delete_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + +@pytest.mark.asyncio +async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteEntryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_entry_async_from_dict(): + await test_delete_entry_async(request_type=dict) + + +def test_delete_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = None + client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry( + datacatalog.DeleteEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry( + datacatalog.DeleteEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetEntryRequest, + dict, +]) +def test_get_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_get_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + client.get_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + +@pytest.mark.asyncio +async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_get_entry_async_from_dict(): + await test_get_entry_async(request_type=dict) + + +def test_get_entry_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = datacatalog.Entry() + client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_entry_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetEntryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + await client.get_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_entry_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_entry_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_entry( + datacatalog.GetEntryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_entry_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_entry_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_entry( + datacatalog.GetEntryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.LookupEntryRequest, + dict, +]) +def test_lookup_entry(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + type_=datacatalog.EntryType.TABLE, + integrated_system=common.IntegratedSystem.BIGQUERY, + ) + response = client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_lookup_entry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + client.lookup_entry() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + +@pytest.mark.asyncio +async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.LookupEntryRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.lookup_entry), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( + name='name_value', + linked_resource='linked_resource_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.lookup_entry(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.LookupEntryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.Entry) + assert response.name == 'name_value' + assert response.linked_resource == 'linked_resource_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_lookup_entry_async_from_dict(): + await test_lookup_entry_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListEntriesRequest, + dict, +]) +def test_list_entries(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + client.list_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + +@pytest.mark.asyncio +async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntriesRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_entries_async_from_dict(): + await test_list_entries_async(request_type=dict) + + +def test_list_entries_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = datacatalog.ListEntriesResponse() + client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_entries_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListEntriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) + await client.list_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_entries_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_entries_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_entries( + datacatalog.ListEntriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_entries_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_entries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_entries_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_entries( + datacatalog.ListEntriesRequest(), + parent='parent_value', + ) + + +def test_list_entries_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_entries(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datacatalog.Entry) + for i in results) +def test_list_entries_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_entries_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datacatalog.Entry) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_entries_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + datacatalog.Entry(), + ], + next_page_token='abc', + ), + datacatalog.ListEntriesResponse( + entries=[], + next_page_token='def', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + ], + next_page_token='ghi', + ), + datacatalog.ListEntriesResponse( + entries=[ + datacatalog.Entry(), + datacatalog.Entry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagTemplateRequest, + dict, +]) +def test_create_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + ) + response = client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_create_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + client.create_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + +@pytest.mark.asyncio +async def test_create_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + )) + response = await client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_create_tag_template_async_from_dict(): + await test_create_tag_template_async(request_type=dict) + + +def test_create_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.create_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tag_template( + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_id + mock_val = 'tag_template_id_value' + assert arg == mock_val + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + + +def test_create_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tag_template( + datacatalog.CreateTagTemplateRequest(), + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tag_template( + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_id + mock_val = 'tag_template_id_value' + assert arg == mock_val + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag_template( + datacatalog.CreateTagTemplateRequest(), + parent='parent_value', + tag_template_id='tag_template_id_value', + tag_template=tags.TagTemplate(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.GetTagTemplateRequest, + dict, +]) +def test_get_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + ) + response = client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_get_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + client.get_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + +@pytest.mark.asyncio +async def test_get_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + )) + response = await client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.GetTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_get_tag_template_async_from_dict(): + await test_get_tag_template_async(request_type=dict) + + +def test_get_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.GetTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.GetTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.get_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tag_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tag_template( + datacatalog.GetTagTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tag_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
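+ # Callers must pick one style or the other, presumably so there is no
+ # ambiguity about which value should take precedence.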
+ with pytest.raises(ValueError): + await client.get_tag_template( + datacatalog.GetTagTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagTemplateRequest, + dict, +]) +def test_update_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate( + name='name_value', + display_name='display_name_value', + ) + response = client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_update_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + client.update_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + +@pytest.mark.asyncio +async def test_update_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( + name='name_value', + display_name='display_name_value', + )) + response = await client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateRequest() + + # Establish that the response is the type that we expect. 
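+ # (Awaiting the FakeUnaryUnaryCall above unwraps it back into the plain
+ # message, which is why the response is a tags.TagTemplate here.)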
+ assert isinstance(response, tags.TagTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_update_tag_template_async_from_dict(): + await test_update_tag_template_async(request_type=dict) + + +def test_update_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateRequest() + + request.tag_template.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + call.return_value = tags.TagTemplate() + client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag_template.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateRequest() + + request.tag_template.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + await client.update_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag_template.name=name_value', + ) in kw['metadata'] + + +def test_update_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag_template( + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_tag_template( + datacatalog.UpdateTagTemplateRequest(), + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag_template( + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tag_template + mock_val = tags.TagTemplate(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag_template( + datacatalog.UpdateTagTemplateRequest(), + tag_template=tags.TagTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteTagTemplateRequest, + dict, +]) +def test_delete_tag_template(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tag_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + client.delete_tag_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_template_async_from_dict(): + await test_delete_tag_template_async(request_type=dict) + + +def test_delete_tag_template_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + call.return_value = None + client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_template_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
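+ # Patching __call__ on the transport's RPC callable intercepts the
+ # request before it reaches gRPC, so no real channel is needed.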
+ with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag_template( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +def test_delete_tag_template_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag_template( + datacatalog.DeleteTagTemplateRequest(), + name='name_value', + force=True, + ) + +@pytest.mark.asyncio +async def test_delete_tag_template_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag_template( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_template_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag_template( + datacatalog.DeleteTagTemplateRequest(), + name='name_value', + force=True, + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagTemplateFieldRequest, + dict, +]) +def test_create_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_create_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + client.create_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_create_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_create_tag_template_field_async_from_dict(): + await test_create_tag_template_field_async(request_type=dict) + + +def test_create_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateFieldRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
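+ # x-goog-request-params is the routing header the client attaches so the
+ # backend can route the request based on the parent resource.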
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagTemplateFieldRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.create_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tag_template_field( + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_field_id + mock_val = 'tag_template_field_id_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + + +def test_create_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tag_template_field( + datacatalog.CreateTagTemplateFieldRequest(), + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_tag_template_field( + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag_template_field_id + mock_val = 'tag_template_field_id_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag_template_field( + datacatalog.CreateTagTemplateFieldRequest(), + parent='parent_value', + tag_template_field_id='tag_template_field_id_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagTemplateFieldRequest, + dict, +]) +def test_update_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_update_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + client.update_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_update_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_update_tag_template_field_async_from_dict(): + await test_update_tag_template_field_async(request_type=dict) + + +def test_update_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.update_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_update_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag_template_field( + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tag_template_field( + datacatalog.UpdateTagTemplateFieldRequest(), + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag_template_field( + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].tag_template_field + mock_val = tags.TagTemplateField(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag_template_field( + datacatalog.UpdateTagTemplateFieldRequest(), + name='name_value', + tag_template_field=tags.TagTemplateField(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.RenameTagTemplateFieldRequest, + dict, +]) +def test_rename_tag_template_field(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_rename_tag_template_field_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + client.rename_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_rename_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_async_from_dict(): + await test_rename_tag_template_field_async(request_type=dict) + + +def test_rename_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.rename_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_rename_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. 
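+ # (The sync stub can return the message directly; the async variants wrap
+ # the same value in grpc_helpers_async.FakeUnaryUnaryCall so it can be
+ # awaited.)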
+ call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field( + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_tag_template_field_id + mock_val = 'new_tag_template_field_id_value' + assert arg == mock_val + + +def test_rename_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_tag_template_field( + datacatalog.RenameTagTemplateFieldRequest(), + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + +@pytest.mark.asyncio +async def test_rename_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rename_tag_template_field( + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_tag_template_field_id + mock_val = 'new_tag_template_field_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_rename_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rename_tag_template_field( + datacatalog.RenameTagTemplateFieldRequest(), + name='name_value', + new_tag_template_field_id='new_tag_template_field_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.RenameTagTemplateFieldEnumValueRequest, + dict, +]) +def test_rename_tag_template_field_enum_value(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + ) + response = client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +def test_rename_tag_template_field_enum_value_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + client.rename_tag_template_field_enum_value() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( + name='name_value', + display_name='display_name_value', + is_required=True, + description='description_value', + order=540, + )) + response = await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.TagTemplateField) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.is_required is True + assert response.description == 'description_value' + assert response.order == 540 + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_async_from_dict(): + await test_rename_tag_template_field_enum_value_async(request_type=dict) + + +def test_rename_tag_template_field_enum_value_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + call.return_value = tags.TagTemplateField() + client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RenameTagTemplateFieldEnumValueRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) + await client.rename_tag_template_field_enum_value(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_rename_tag_template_field_enum_value_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rename_tag_template_field_enum_value), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.TagTemplateField() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rename_tag_template_field_enum_value( + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].new_enum_value_display_name + mock_val = 'new_enum_value_display_name_value' + assert arg == mock_val + + +def test_rename_tag_template_field_enum_value_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rename_tag_template_field_enum_value( + datacatalog.RenameTagTemplateFieldEnumValueRequest(), + name='name_value', + new_enum_value_display_name='new_enum_value_display_name_value', + ) + +@pytest.mark.asyncio +async def test_rename_tag_template_field_enum_value_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.rename_tag_template_field_enum_value),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.rename_tag_template_field_enum_value(
+            name='name_value',
+            new_enum_value_display_name='new_enum_value_display_name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+        arg = args[0].new_enum_value_display_name
+        mock_val = 'new_enum_value_display_name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_rename_tag_template_field_enum_value_flattened_error_async():
+    client = DataCatalogAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.rename_tag_template_field_enum_value(
+            datacatalog.RenameTagTemplateFieldEnumValueRequest(),
+            name='name_value',
+            new_enum_value_display_name='new_enum_value_display_name_value',
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+  datacatalog.DeleteTagTemplateFieldRequest,
+  dict,
+])
+def test_delete_tag_template_field(request_type, transport: str = 'grpc'):
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_tag_template_field),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_tag_template_field(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == datacatalog.DeleteTagTemplateFieldRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_tag_template_field_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + client.delete_tag_template_field() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() + +@pytest.mark.asyncio +async def test_delete_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateFieldRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_async_from_dict(): + await test_delete_tag_template_field_async(request_type=dict) + + +def test_delete_tag_template_field_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + call.return_value = None + client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_template_field_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagTemplateFieldRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag_template_field(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_template_field_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag_template_field( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +def test_delete_tag_template_field_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag_template_field( + datacatalog.DeleteTagTemplateFieldRequest(), + name='name_value', + force=True, + ) + +@pytest.mark.asyncio +async def test_delete_tag_template_field_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag_template_field), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag_template_field( + name='name_value', + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_template_field_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag_template_field( + datacatalog.DeleteTagTemplateFieldRequest(), + name='name_value', + force=True, + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.CreateTagRequest, + dict, +]) +def test_create_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + column='column_value', + ) + response = client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +def test_create_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + client.create_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + +@pytest.mark.asyncio +async def test_create_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + )) + response = await client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.CreateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +@pytest.mark.asyncio +async def test_create_tag_async_from_dict(): + await test_create_tag_async(request_type=dict) + + +def test_create_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + call.return_value = tags.Tag() + client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.CreateTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + await client.create_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_tag( + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + + +def test_create_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tag( + datacatalog.CreateTagRequest(), + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tag( + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_tag( + datacatalog.CreateTagRequest(), + parent='parent_value', + tag=tags.Tag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.UpdateTagRequest, + dict, +]) +def test_update_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + column='column_value', + ) + response = client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +def test_update_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + client.update_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + +@pytest.mark.asyncio +async def test_update_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( + name='name_value', + template='template_value', + template_display_name='template_display_name_value', + )) + response = await client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.UpdateTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, tags.Tag) + assert response.name == 'name_value' + assert response.template == 'template_value' + assert response.template_display_name == 'template_display_name_value' + + +@pytest.mark.asyncio +async def test_update_tag_async_from_dict(): + await test_update_tag_async(request_type=dict) + + +def test_update_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagRequest() + + request.tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + call.return_value = tags.Tag() + client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.UpdateTagRequest() + + request.tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + await client.update_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'tag.name=name_value', + ) in kw['metadata'] + + +def test_update_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tag( + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_tag( + datacatalog.UpdateTagRequest(), + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = tags.Tag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tag( + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tag + mock_val = tags.Tag(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_tag( + datacatalog.UpdateTagRequest(), + tag=tags.Tag(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.DeleteTagRequest, + dict, +]) +def test_delete_tag(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + client.delete_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + +@pytest.mark.asyncio +async def test_delete_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.DeleteTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tag_async_from_dict(): + await test_delete_tag_async(request_type=dict) + + +def test_delete_tag_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + call.return_value = None + client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_tag_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.DeleteTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_tag_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_tag_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tag( + datacatalog.DeleteTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_tag_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_tag_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_tag( + datacatalog.DeleteTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + datacatalog.ListTagsRequest, + dict, +]) +def test_list_tags(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTagsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + client.list_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + +@pytest.mark.asyncio +async def test_list_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListTagsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.ListTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTagsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_tags_async_from_dict(): + await test_list_tags_async(request_type=dict) + + +def test_list_tags_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + call.return_value = datacatalog.ListTagsResponse() + client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_tags_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.ListTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) + await client.list_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_tags_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_tags_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tags( + datacatalog.ListTagsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_tags_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.ListTagsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_tags_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_tags( + datacatalog.ListTagsRequest(), + parent='parent_value', + ) + + +def test_list_tags_pager(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Set the response to a series of pages. 
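+        # Four pages are simulated below; iteration stops once a page comes
+        # back with an empty next_page_token, so the trailing RuntimeError
+        # only guards against the pager requesting more pages than provided.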
+ call.side_effect = ( + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + tags.Tag(), + ], + next_page_token='abc', + ), + datacatalog.ListTagsResponse( + tags=[], + next_page_token='def', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + ], + next_page_token='ghi', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_tags(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tags.Tag) + for i in results) +def test_list_tags_pages(transport_name: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + tags.Tag(), + ], + next_page_token='abc', + ), + datacatalog.ListTagsResponse( + tags=[], + next_page_token='def', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + ], + next_page_token='ghi', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tags(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_tags_async_pager(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + tags.Tag(), + ], + next_page_token='abc', + ), + datacatalog.ListTagsResponse( + tags=[], + next_page_token='def', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + ], + next_page_token='ghi', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tags(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tags.Tag) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_tags_async_pages(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tags), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + tags.Tag(), + ], + next_page_token='abc', + ), + datacatalog.ListTagsResponse( + tags=[], + next_page_token='def', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + ], + next_page_token='ghi', + ), + datacatalog.ListTagsResponse( + tags=[ + tags.Tag(), + tags.Tag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + + +def test_set_iam_policy_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_get_iam_policy_flattened(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + + +def test_get_iam_policy_flattened_error(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource='resource_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = 'resource_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource='resource_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataCatalogClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataCatalogGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = DataCatalogClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataCatalogGrpcTransport, + ) + +def test_data_catalog_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataCatalogTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_data_catalog_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DataCatalogTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'search_catalog', + 'create_entry_group', + 'update_entry_group', + 'get_entry_group', + 'delete_entry_group', + 'list_entry_groups', + 'create_entry', + 'update_entry', + 'delete_entry', + 'get_entry', + 'lookup_entry', + 'list_entries', + 'create_tag_template', + 'get_tag_template', + 'update_tag_template', + 'delete_tag_template', + 'create_tag_template_field', + 'update_tag_template_field', + 'rename_tag_template_field', + 'rename_tag_template_field_enum_value', + 'delete_tag_template_field', + 'create_tag', + 'update_tag', + 'delete_tag', + 'list_tags', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_catalog_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_data_catalog_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataCatalogTransport() + adc.assert_called_once() + + +def test_data_catalog_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataCatalogClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, + ], +) +def test_data_catalog_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataCatalogGrpcTransport, + transports.DataCatalogGrpcAsyncIOTransport, + ], +) +def test_data_catalog_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataCatalogGrpcTransport, grpc_helpers), + (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_catalog_host_no_port(transport_name): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_data_catalog_host_with_port(transport_name): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:8000' + ) + +def test_data_catalog_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataCatalogGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_data_catalog_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DataCatalogGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) +def test_data_catalog_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_entry_path(): + project = "squid" + location = "clam" + entry_group = "whelk" + entry = "octopus" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) + actual = DataCatalogClient.entry_path(project, location, entry_group, entry) + assert expected == actual + + +def test_parse_entry_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "entry_group": "cuttlefish", + "entry": "mussel", + } + path = DataCatalogClient.entry_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_entry_path(path) + assert expected == actual + +def test_entry_group_path(): + project = "winkle" + location = "nautilus" + entry_group = "scallop" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) + actual = DataCatalogClient.entry_group_path(project, location, entry_group) + assert expected == actual + + +def test_parse_entry_group_path(): + expected = { + "project": "abalone", + "location": "squid", + "entry_group": "clam", + } + path = DataCatalogClient.entry_group_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_entry_group_path(path) + assert expected == actual + +def test_tag_path(): + project = "whelk" + location = "octopus" + entry_group = "oyster" + entry = "nudibranch" + tag = "cuttlefish" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) + actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) + assert expected == actual + + +def test_parse_tag_path(): + expected = { + "project": "mussel", + "location": "winkle", + "entry_group": "nautilus", + "entry": "scallop", + "tag": "abalone", + } + path = DataCatalogClient.tag_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_path(path) + assert expected == actual + +def test_tag_template_path(): + project = "squid" + location = "clam" + tag_template = "whelk" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) + actual = DataCatalogClient.tag_template_path(project, location, tag_template) + assert expected == actual + + +def test_parse_tag_template_path(): + expected = { + "project": "octopus", + "location": "oyster", + "tag_template": "nudibranch", + } + path = DataCatalogClient.tag_template_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_path(path) + assert expected == actual + +def test_tag_template_field_path(): + project = "cuttlefish" + location = "mussel" + tag_template = "winkle" + field = "nautilus" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) + actual = DataCatalogClient.tag_template_field_path(project, location, tag_template, field) + assert expected == actual + + +def test_parse_tag_template_field_path(): + expected = { + "project": "scallop", + "location": "abalone", + "tag_template": "squid", + "field": "clam", + } + path = DataCatalogClient.tag_template_field_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_tag_template_field_path(path) + assert expected == actual + +def test_tag_template_field_enum_value_path(): + project = "whelk" + location = "octopus" + tag_template = "oyster" + tag_template_field_id = "nudibranch" + enum_value_display_name = "cuttlefish" + expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) + actual = DataCatalogClient.tag_template_field_enum_value_path(project, location, tag_template, tag_template_field_id, enum_value_display_name) + assert expected == actual + + +def test_parse_tag_template_field_enum_value_path(): + expected = { + "project": "mussel", + "location": "winkle", + "tag_template": "nautilus", + "tag_template_field_id": "scallop", + "enum_value_display_name": "abalone", + } + path = DataCatalogClient.tag_template_field_enum_value_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DataCatalogClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = DataCatalogClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = DataCatalogClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = DataCatalogClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DataCatalogClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = DataCatalogClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = DataCatalogClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = DataCatalogClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataCatalogClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DataCatalogClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = DataCatalogClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataCatalogClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: + transport_class = DataCatalogClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataCatalogAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (DataCatalogClient, transports.DataCatalogGrpcTransport), + (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py new file mode 100644 index 000000000000..f18f350ff488 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -0,0 +1,4521 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import PolicyTagManagerAsyncClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import PolicyTagManagerClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import transports +from google.cloud.datacatalog_v1beta1.types import common +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import timestamps +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None + assert PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerClient, "grpc"), + (PolicyTagManagerAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.PolicyTagManagerGrpcTransport, "grpc"), + (transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerClient, "grpc"), + (PolicyTagManagerAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +def test_policy_tag_manager_client_get_transport_class(): + transport = PolicyTagManagerClient.get_transport_class() + 
available_transports = [ + transports.PolicyTagManagerGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +def test_policy_tag_manager_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "true"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "false"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_policy_tag_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PolicyTagManagerClient, PolicyTagManagerAsyncClient +]) +@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) +@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) +def test_policy_tag_manager_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_policy_tag_manager_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.CreateTaxonomyRequest, + dict, +]) +def test_create_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_create_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + client.create_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + +@pytest.mark.asyncio +async def test_create_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreateTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_create_taxonomy_async_from_dict(): + await test_create_taxonomy_async(request_type=dict) + + +def test_create_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreateTaxonomyRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.create_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_taxonomy( + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + + +def test_create_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_taxonomy( + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_taxonomy( + policytagmanager.CreateTaxonomyRequest(), + parent='parent_value', + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.DeleteTaxonomyRequest, + dict, +]) +def test_delete_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + client.delete_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + +@pytest.mark.asyncio +async def test_delete_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeleteTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeleteTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_taxonomy_async_from_dict(): + await test_delete_taxonomy_async(request_type=dict) + + +def test_delete_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + call.return_value = None + client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeleteTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
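+    # Illustrative sketch with a hypothetical resource name: outside of tests
+    # the routing header would carry the full resource path, e.g.
+    #
+    #     ('x-goog-request-params',
+    #      'name=projects/my-project/locations/us/taxonomies/my-taxonomy')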
+ with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_taxonomy( + policytagmanager.DeleteTaxonomyRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.UpdateTaxonomyRequest, + dict, +]) +def test_update_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_update_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + client.update_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + +@pytest.mark.asyncio +async def test_update_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdateTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdateTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_update_taxonomy_async_from_dict(): + await test_update_taxonomy_async(request_type=dict) + + +def test_update_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'taxonomy.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdateTaxonomyRequest() + + request.taxonomy.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.update_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'taxonomy.name=name_value', + ) in kw['metadata'] + + +def test_update_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + + +def test_update_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_taxonomy( + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].taxonomy + mock_val = policytagmanager.Taxonomy(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_taxonomy( + policytagmanager.UpdateTaxonomyRequest(), + taxonomy=policytagmanager.Taxonomy(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.ListTaxonomiesRequest, + dict, +]) +def test_list_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + client.list_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_list_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListTaxonomiesRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTaxonomiesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_from_dict(): + await test_list_taxonomies_async(request_type=dict) + + +def test_list_taxonomies_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + call.return_value = policytagmanager.ListTaxonomiesResponse() + client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_taxonomies_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) + await client.list_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_taxonomies_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_taxonomies( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_taxonomies_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListTaxonomiesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_taxonomies( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_taxonomies_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_taxonomies( + policytagmanager.ListTaxonomiesRequest(), + parent='parent_value', + ) + + +def test_list_taxonomies_pager(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Set the response to a series of pages. 
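+        # Illustrative sketch, hypothetical usage: the pager re-issues the RPC
+        # with each next_page_token, so a caller can simply iterate it, e.g.
+        #
+        #     for taxonomy in client.list_taxonomies(parent='projects/my-project/locations/us'):
+        #         ...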
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_taxonomies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) + for i in results) +def test_list_taxonomies_pages(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_taxonomies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pager(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
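+        # Illustrative sketch, hypothetical usage: the async pager is consumed
+        # with `async for`, fetching follow-up pages on demand, e.g.
+        #
+        #     pager = await client.list_taxonomies(parent='projects/my-project/locations/us')
+        #     async for taxonomy in pager:
+        #         ...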
+ call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_taxonomies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.Taxonomy) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_taxonomies_async_pages(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_taxonomies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + next_page_token='abc', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[], + next_page_token='def', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + ], + next_page_token='ghi', + ), + policytagmanager.ListTaxonomiesResponse( + taxonomies=[ + policytagmanager.Taxonomy(), + policytagmanager.Taxonomy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_taxonomies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + policytagmanager.GetTaxonomyRequest, + dict, +]) +def test_get_taxonomy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + ) + response = client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +def test_get_taxonomy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + client.get_taxonomy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + +@pytest.mark.asyncio +async def test_get_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetTaxonomyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( + name='name_value', + display_name='display_name_value', + description='description_value', + policy_tag_count=1715, + activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], + )) + response = await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetTaxonomyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.Taxonomy) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.policy_tag_count == 1715 + assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] + + +@pytest.mark.asyncio +async def test_get_taxonomy_async_from_dict(): + await test_get_taxonomy_async(request_type=dict) + + +def test_get_taxonomy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + call.return_value = policytagmanager.Taxonomy() + client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_taxonomy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetTaxonomyRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + await client.get_taxonomy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_taxonomy_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_taxonomy_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_taxonomy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.Taxonomy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_taxonomy( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_taxonomy_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_taxonomy( + policytagmanager.GetTaxonomyRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.CreatePolicyTagRequest, + dict, +]) +def test_create_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_create_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + client.create_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + +@pytest.mark.asyncio +async def test_create_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreatePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.CreatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_create_policy_tag_async_from_dict(): + await test_create_policy_tag_async(request_type=dict) + + +def test_create_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.CreatePolicyTagRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.create_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_policy_tag( + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + + +def test_create_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_policy_tag( + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_policy_tag( + policytagmanager.CreatePolicyTagRequest(), + parent='parent_value', + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.DeletePolicyTagRequest, + dict, +]) +def test_delete_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
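+    # In other words, client.delete_policy_tag() with no arguments should be
+    # equivalent to sending a default policytagmanager.DeletePolicyTagRequest(),
+    # which is what the assertion below checks.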
+ client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + client.delete_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + +@pytest.mark.asyncio +async def test_delete_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeletePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.DeletePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_policy_tag_async_from_dict(): + await test_delete_policy_tag_async(request_type=dict) + + +def test_delete_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + call.return_value = None + client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.DeletePolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_policy_tag( + policytagmanager.DeletePolicyTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.UpdatePolicyTagRequest, + dict, +]) +def test_update_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_update_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + client.update_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + +@pytest.mark.asyncio +async def test_update_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdatePolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.UpdatePolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_update_policy_tag_async_from_dict(): + await test_update_policy_tag_async(request_type=dict) + + +def test_update_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'policy_tag.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.UpdatePolicyTagRequest() + + request.policy_tag.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.update_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'policy_tag.name=name_value', + ) in kw['metadata'] + + +def test_update_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + + +def test_update_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_policy_tag( + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].policy_tag + mock_val = policytagmanager.PolicyTag(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_policy_tag( + policytagmanager.UpdatePolicyTagRequest(), + policy_tag=policytagmanager.PolicyTag(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanager.ListPolicyTagsRequest, + dict, +]) +def test_list_policy_tags(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_policy_tags_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + client.list_policy_tags() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + +@pytest.mark.asyncio +async def test_list_policy_tags_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListPolicyTagsRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.ListPolicyTagsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPolicyTagsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_from_dict(): + await test_list_policy_tags_async(request_type=dict) + + +def test_list_policy_tags_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + call.return_value = policytagmanager.ListPolicyTagsResponse() + client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_policy_tags_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.ListPolicyTagsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) + await client.list_policy_tags(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_policy_tags_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_policy_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_policy_tags_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_policy_tags( + policytagmanager.ListPolicyTagsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_policy_tags_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.ListPolicyTagsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_policy_tags( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_policy_tags_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_policy_tags( + policytagmanager.ListPolicyTagsRequest(), + parent='parent_value', + ) + + +def test_list_policy_tags_pager(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token='abc', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], + next_page_token='def', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + ], + next_page_token='ghi', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_policy_tags(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, policytagmanager.PolicyTag) + for i in results) +def test_list_policy_tags_pages(transport_name: str = "grpc"): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token='abc', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], + next_page_token='def', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + ], + next_page_token='ghi', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = list(client.list_policy_tags(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_policy_tags_async_pager(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token='abc', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], + next_page_token='def', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + ], + next_page_token='ghi', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_policy_tags(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, policytagmanager.PolicyTag) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_policy_tags_async_pages(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_policy_tags), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + next_page_token='abc', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[], + next_page_token='def', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + ], + next_page_token='ghi', + ), + policytagmanager.ListPolicyTagsResponse( + policy_tags=[ + policytagmanager.PolicyTag(), + policytagmanager.PolicyTag(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_policy_tags(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + policytagmanager.GetPolicyTagRequest, + dict, +]) +def test_get_policy_tag(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + ) + response = client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +def test_get_policy_tag_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + client.get_policy_tag() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + +@pytest.mark.asyncio +async def test_get_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetPolicyTagRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( + name='name_value', + display_name='display_name_value', + description='description_value', + parent_policy_tag='parent_policy_tag_value', + child_policy_tags=['child_policy_tags_value'], + )) + response = await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanager.GetPolicyTagRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanager.PolicyTag) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.parent_policy_tag == 'parent_policy_tag_value' + assert response.child_policy_tags == ['child_policy_tags_value'] + + +@pytest.mark.asyncio +async def test_get_policy_tag_async_from_dict(): + await test_get_policy_tag_async(request_type=dict) + + +def test_get_policy_tag_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + call.return_value = policytagmanager.PolicyTag() + client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_policy_tag_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanager.GetPolicyTagRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + await client.get_policy_tag(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_policy_tag_flattened(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_policy_tag_flattened_error(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_policy_tag), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanager.PolicyTag() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_policy_tag( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_policy_tag_flattened_error_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_policy_tag( + policytagmanager.GetPolicyTagRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + +def test_get_iam_policy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_get_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy(request={ + 'resource': 'resource_value', + 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b'etag_blob', + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( + version=774, + etag=b'etag_blob', + )) + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b'etag_blob' + + +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + +def test_set_iam_policy_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_set_iam_policy_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy(request={ + 'resource': 'resource_value', + 'policy': policy_pb2.Policy(version=774), + 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), + } + ) + call.assert_called() + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions(request_type, transport: str = 'grpc'): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + ) + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( + permissions=['permissions_value'], + )) + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ['permissions_value'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + +def test_test_iam_permissions_field_headers(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + +def test_test_iam_permissions_from_dict_foreign(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions(request={ + 'resource': 'resource_value', + 'permissions': ['permissions_value'], + } + ) + call.assert_called() + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = PolicyTagManagerClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PolicyTagManagerGrpcTransport, + ) + +def test_policy_tag_manager_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_policy_tag_manager_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_taxonomy', + 'delete_taxonomy', + 'update_taxonomy', + 'list_taxonomies', + 'get_taxonomy', + 'create_policy_tag', + 'delete_policy_tag', + 'update_policy_tag', + 'list_policy_tags', + 'get_policy_tag', + 'get_iam_policy', + 'set_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_policy_tag_manager_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerGrpcTransport, + transports.PolicyTagManagerGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_host_no_port(transport_name): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_policy_tag_manager_host_with_port(transport_name): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'datacatalog.googleapis.com:8000' + ) + +def test_policy_tag_manager_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_policy_tag_manager_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PolicyTagManagerGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) +def test_policy_tag_manager_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_policy_tag_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + policy_tag = "octopus" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) + actual = PolicyTagManagerClient.policy_tag_path(project, location, taxonomy, policy_tag) + assert expected == actual + + +def test_parse_policy_tag_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "taxonomy": "cuttlefish", + "policy_tag": "mussel", + } + path = PolicyTagManagerClient.policy_tag_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_policy_tag_path(path) + assert expected == actual + +def test_taxonomy_path(): + project = "winkle" + location = "nautilus" + taxonomy = "scallop" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "abalone", + "location": "squid", + "taxonomy": "clam", + } + path = PolicyTagManagerClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PolicyTagManagerClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PolicyTagManagerClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder, ) + actual = PolicyTagManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PolicyTagManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PolicyTagManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PolicyTagManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project, ) + actual = PolicyTagManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PolicyTagManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PolicyTagManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PolicyTagManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: + transport_class = PolicyTagManagerClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PolicyTagManagerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = PolicyTagManagerClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
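+        # Entering the client as a context manager should not close the
+        # transport; exiting it should.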
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport), + (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py new file mode 100644 index 000000000000..88d1769958e7 --- /dev/null +++ b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -0,0 +1,1456 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient +from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import transports +from google.cloud.datacatalog_v1beta1.types import policytagmanager +from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerSerializationClient, "grpc"), + (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), + (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PolicyTagManagerSerializationClient, "grpc"), + (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + 
assert client.transport._host == ( + 'datacatalog.googleapis.com:443' + ) + + +def test_policy_tag_manager_serialization_client_get_transport_class(): + transport = PolicyTagManagerSerializationClient.get_transport_class() + available_transports = [ + transports.PolicyTagManagerSerializationGrpcTransport, + ] + assert transport in available_transports + + transport = PolicyTagManagerSerializationClient.get_transport_class("grpc") + assert transport == transports.PolicyTagManagerSerializationGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) +@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) +def test_policy_tag_manager_serialization_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "true"),
+    (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "false"),
+    (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "false"),
+])
+@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient))
+@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_policy_tag_manager_serialization_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient +]) +@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) +@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) +def test_policy_tag_manager_serialization_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
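+    # An explicit api_endpoint in the options takes precedence, and the provided
+    # cert source is returned unchanged.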
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_policy_tag_manager_serialization_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_serialization_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_policy_tag_manager_serialization_client_client_options_from_dict(): + with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = PolicyTagManagerSerializationClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_policy_tag_manager_serialization_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
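+    # Patch load_credentials_from_file and ADC separately so the assertion below
+    # can confirm create_channel receives the file-based credentials, not ADC.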
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + policytagmanagerserialization.ImportTaxonomiesRequest, + dict, +]) +def test_import_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse( + ) + response = client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +def test_import_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + client.import_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_import_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ImportTaxonomiesRequest): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. 
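+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned response so the
+        # mocked stub result can be awaited like a real async gRPC call.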
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse( + )) + response = await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_import_taxonomies_async_from_dict(): + await test_import_taxonomies_async(request_type=dict) + + +def test_import_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() + client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ImportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse()) + await client.import_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + policytagmanagerserialization.ExportTaxonomiesRequest, + dict, +]) +def test_export_taxonomies(request_type, transport: str = 'grpc'): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse( + ) + response = client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +def test_export_taxonomies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + client.export_taxonomies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + +@pytest.mark.asyncio +async def test_export_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ExportTaxonomiesRequest): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse( + )) + response = await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) + + +@pytest.mark.asyncio +async def test_export_taxonomies_async_from_dict(): + await test_export_taxonomies_async(request_type=dict) + + +def test_export_taxonomies_field_headers(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() + client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
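+    # The x-goog-request-params routing header is carried in the call's
+    # metadata keyword argument.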
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_export_taxonomies_field_headers_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = policytagmanagerserialization.ExportTaxonomiesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_taxonomies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse()) + await client.export_taxonomies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PolicyTagManagerSerializationClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PolicyTagManagerSerializationClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PolicyTagManagerSerializationGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = PolicyTagManagerSerializationClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PolicyTagManagerSerializationGrpcTransport, + ) + +def test_policy_tag_manager_serialization_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_policy_tag_manager_serialization_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PolicyTagManagerSerializationTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'import_taxonomies', + 'export_taxonomies', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_policy_tag_manager_serialization_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PolicyTagManagerSerializationTransport() + adc.assert_called_once() + + +def test_policy_tag_manager_serialization_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PolicyTagManagerSerializationClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PolicyTagManagerSerializationGrpcTransport, + transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, + ], +) +def test_policy_tag_manager_serialization_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), + (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_policy_tag_manager_serialization_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "datacatalog.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="datacatalog.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_policy_tag_manager_serialization_host_no_port(transport_name):
+    client = PolicyTagManagerSerializationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'datacatalog.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_policy_tag_manager_serialization_host_with_port(transport_name):
+    client = PolicyTagManagerSerializationClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'datacatalog.googleapis.com:8000'
+    )
+
+def test_policy_tag_manager_serialization_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PolicyTagManagerSerializationGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) +def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_taxonomy_path(): + project = "squid" + location = "clam" + taxonomy = "whelk" + expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) + actual = PolicyTagManagerSerializationClient.taxonomy_path(project, location, taxonomy) + assert expected == actual + + +def test_parse_taxonomy_path(): + expected = { + "project": "octopus", + "location": "oyster", + "taxonomy": "nudibranch", + } + path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PolicyTagManagerSerializationClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = PolicyTagManagerSerializationClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = PolicyTagManagerSerializationClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PolicyTagManagerSerializationClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = PolicyTagManagerSerializationClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = PolicyTagManagerSerializationClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = PolicyTagManagerSerializationClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PolicyTagManagerSerializationClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = PolicyTagManagerSerializationClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: + transport_class = PolicyTagManagerSerializationClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PolicyTagManagerSerializationAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = PolicyTagManagerSerializationClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport), + (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 65d313fcc6859d01afa4b1e362e4378c86ce4785 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 26 Sep 2023 21:55:24 +0000 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../google-cloud-datacatalog/v1/.coveragerc | 13 - .../google-cloud-datacatalog/v1/.flake8 | 33 - .../google-cloud-datacatalog/v1/MANIFEST.in | 2 - .../google-cloud-datacatalog/v1/README.rst | 49 - .../v1/docs/_static/custom.css | 3 - .../google-cloud-datacatalog/v1/docs/conf.py | 376 - .../v1/docs/datacatalog_v1/data_catalog.rst | 10 - .../datacatalog_v1/policy_tag_manager.rst | 10 - .../policy_tag_manager_serialization.rst | 6 - .../v1/docs/datacatalog_v1/services.rst | 8 - .../v1/docs/datacatalog_v1/types.rst | 6 - .../v1/docs/index.rst | 7 - .../v1/google/cloud/datacatalog/__init__.py | 273 - .../google/cloud/datacatalog/gapic_version.py | 16 - .../v1/google/cloud/datacatalog/py.typed | 2 - .../google/cloud/datacatalog_v1/__init__.py | 274 - .../cloud/datacatalog_v1/gapic_metadata.json | 551 - .../cloud/datacatalog_v1/gapic_version.py | 16 - .../v1/google/cloud/datacatalog_v1/py.typed | 2 - .../cloud/datacatalog_v1/services/__init__.py | 15 - .../services/data_catalog/__init__.py | 22 - .../services/data_catalog/async_client.py | 4510 ------- .../services/data_catalog/client.py | 4761 ------- .../services/data_catalog/pagers.py | 504 - .../data_catalog/transports/__init__.py | 33 - .../services/data_catalog/transports/base.py | 657 - .../services/data_catalog/transports/grpc.py | 1469 --- .../data_catalog/transports/grpc_asyncio.py | 1468 --- .../services/policy_tag_manager/__init__.py | 22 - .../policy_tag_manager/async_client.py | 1819 --- .../services/policy_tag_manager/client.py | 2029 --- .../services/policy_tag_manager/pagers.py | 260 - .../policy_tag_manager/transports/__init__.py | 33 - .../policy_tag_manager/transports/base.py | 356 - .../policy_tag_manager/transports/grpc.py | 671 - .../transports/grpc_asyncio.py | 670 - .../__init__.py | 22 - .../async_client.py | 699 -- .../client.py | 906 -- .../transports/__init__.py | 33 - .../transports/base.py | 216 - .../transports/grpc.py | 422 - 
.../transports/grpc_asyncio.py | 421 - .../cloud/datacatalog_v1/types/__init__.py | 288 - .../cloud/datacatalog_v1/types/bigquery.py | 136 - .../cloud/datacatalog_v1/types/common.py | 112 - .../cloud/datacatalog_v1/types/data_source.py | 132 - .../cloud/datacatalog_v1/types/datacatalog.py | 2727 ---- .../datacatalog_v1/types/dataplex_spec.py | 170 - .../datacatalog_v1/types/dump_content.py | 95 - .../datacatalog_v1/types/gcs_fileset_spec.py | 119 - .../datacatalog_v1/types/physical_schema.py | 158 - .../datacatalog_v1/types/policytagmanager.py | 551 - .../types/policytagmanagerserialization.py | 288 - .../cloud/datacatalog_v1/types/schema.py | 204 - .../cloud/datacatalog_v1/types/search.py | 183 - .../cloud/datacatalog_v1/types/table_spec.py | 178 - .../google/cloud/datacatalog_v1/types/tags.py | 466 - .../cloud/datacatalog_v1/types/timestamps.py | 72 - .../cloud/datacatalog_v1/types/usage.py | 156 - .../google-cloud-datacatalog/v1/mypy.ini | 3 - .../google-cloud-datacatalog/v1/noxfile.py | 184 - ...nerated_data_catalog_create_entry_async.py | 59 - ...d_data_catalog_create_entry_group_async.py | 53 - ...ed_data_catalog_create_entry_group_sync.py | 53 - ...enerated_data_catalog_create_entry_sync.py | 59 - ...generated_data_catalog_create_tag_async.py | 57 - ..._generated_data_catalog_create_tag_sync.py | 57 - ..._data_catalog_create_tag_template_async.py | 53 - ...catalog_create_tag_template_field_async.py | 57 - ..._catalog_create_tag_template_field_sync.py | 57 - ...d_data_catalog_create_tag_template_sync.py | 53 - ...nerated_data_catalog_delete_entry_async.py | 50 - ...d_data_catalog_delete_entry_group_async.py | 50 - ...ed_data_catalog_delete_entry_group_sync.py | 50 - ...enerated_data_catalog_delete_entry_sync.py | 50 - ...generated_data_catalog_delete_tag_async.py | 50 - ..._generated_data_catalog_delete_tag_sync.py | 50 - ..._data_catalog_delete_tag_template_async.py | 51 - ...catalog_delete_tag_template_field_async.py | 51 - ..._catalog_delete_tag_template_field_sync.py | 51 - ...d_data_catalog_delete_tag_template_sync.py | 51 - ..._generated_data_catalog_get_entry_async.py | 52 - ...ated_data_catalog_get_entry_group_async.py | 52 - ...rated_data_catalog_get_entry_group_sync.py | 52 - ...1_generated_data_catalog_get_entry_sync.py | 52 - ...rated_data_catalog_get_iam_policy_async.py | 53 - ...erated_data_catalog_get_iam_policy_sync.py | 53 - ...ted_data_catalog_get_tag_template_async.py | 52 - ...ated_data_catalog_get_tag_template_sync.py | 52 - ...rated_data_catalog_import_entries_async.py | 57 - ...erated_data_catalog_import_entries_sync.py | 57 - ...nerated_data_catalog_list_entries_async.py | 53 - ...enerated_data_catalog_list_entries_sync.py | 53 - ...ed_data_catalog_list_entry_groups_async.py | 53 - ...ted_data_catalog_list_entry_groups_sync.py | 53 - ..._generated_data_catalog_list_tags_async.py | 53 - ...1_generated_data_catalog_list_tags_sync.py | 53 - ...nerated_data_catalog_lookup_entry_async.py | 52 - ...enerated_data_catalog_lookup_entry_sync.py | 52 - ...ata_catalog_modify_entry_contacts_async.py | 52 - ...data_catalog_modify_entry_contacts_sync.py | 52 - ...ata_catalog_modify_entry_overview_async.py | 52 - ...data_catalog_modify_entry_overview_sync.py | 52 - ...rated_data_catalog_reconcile_tags_async.py | 57 - ...erated_data_catalog_reconcile_tags_sync.py | 57 - ...catalog_rename_tag_template_field_async.py | 53 - ...ame_tag_template_field_enum_value_async.py | 53 - ...name_tag_template_field_enum_value_sync.py | 53 - ..._catalog_rename_tag_template_field_sync.py | 53 
- ...rated_data_catalog_search_catalog_async.py | 52 - ...erated_data_catalog_search_catalog_sync.py | 52 - ...rated_data_catalog_set_iam_policy_async.py | 53 - ...erated_data_catalog_set_iam_policy_sync.py | 53 - ...generated_data_catalog_star_entry_async.py | 52 - ..._generated_data_catalog_star_entry_sync.py | 52 - ...data_catalog_test_iam_permissions_async.py | 54 - ..._data_catalog_test_iam_permissions_sync.py | 54 - ...nerated_data_catalog_unstar_entry_async.py | 52 - ...enerated_data_catalog_unstar_entry_sync.py | 52 - ...nerated_data_catalog_update_entry_async.py | 57 - ...d_data_catalog_update_entry_group_async.py | 51 - ...ed_data_catalog_update_entry_group_sync.py | 51 - ...enerated_data_catalog_update_entry_sync.py | 57 - ...generated_data_catalog_update_tag_async.py | 56 - ..._generated_data_catalog_update_tag_sync.py | 56 - ..._data_catalog_update_tag_template_async.py | 51 - ...catalog_update_tag_template_field_async.py | 56 - ..._catalog_update_tag_template_field_sync.py | 56 - ...d_data_catalog_update_tag_template_sync.py | 51 - ...icy_tag_manager_create_policy_tag_async.py | 52 - ...licy_tag_manager_create_policy_tag_sync.py | 52 - ...olicy_tag_manager_create_taxonomy_async.py | 52 - ...policy_tag_manager_create_taxonomy_sync.py | 52 - ...icy_tag_manager_delete_policy_tag_async.py | 50 - ...licy_tag_manager_delete_policy_tag_sync.py | 50 - ...olicy_tag_manager_delete_taxonomy_async.py | 50 - ...policy_tag_manager_delete_taxonomy_sync.py | 50 - ...policy_tag_manager_get_iam_policy_async.py | 53 - ..._policy_tag_manager_get_iam_policy_sync.py | 53 - ...policy_tag_manager_get_policy_tag_async.py | 52 - ..._policy_tag_manager_get_policy_tag_sync.py | 52 - ...d_policy_tag_manager_get_taxonomy_async.py | 52 - ...ed_policy_tag_manager_get_taxonomy_sync.py | 52 - ...licy_tag_manager_list_policy_tags_async.py | 53 - ...olicy_tag_manager_list_policy_tags_sync.py | 53 - ...olicy_tag_manager_list_taxonomies_async.py | 53 - ...policy_tag_manager_list_taxonomies_sync.py | 53 - ...r_serialization_export_taxonomies_async.py | 54 - ...er_serialization_export_taxonomies_sync.py | 54 - ...r_serialization_import_taxonomies_async.py | 56 - ...er_serialization_import_taxonomies_sync.py | 56 - ...er_serialization_replace_taxonomy_async.py | 56 - ...ger_serialization_replace_taxonomy_sync.py | 56 - ...policy_tag_manager_set_iam_policy_async.py | 53 - ..._policy_tag_manager_set_iam_policy_sync.py | 53 - ..._tag_manager_test_iam_permissions_async.py | 54 - ...y_tag_manager_test_iam_permissions_sync.py | 54 - ...icy_tag_manager_update_policy_tag_async.py | 51 - ...licy_tag_manager_update_policy_tag_sync.py | 51 - ...olicy_tag_manager_update_taxonomy_async.py | 51 - ...policy_tag_manager_update_taxonomy_sync.py | 51 - ..._metadata_google.cloud.datacatalog.v1.json | 8111 ------------ .../scripts/fixup_datacatalog_v1_keywords.py | 222 - .../google-cloud-datacatalog/v1/setup.py | 91 - .../v1/testing/constraints-3.10.txt | 7 - .../v1/testing/constraints-3.11.txt | 7 - .../v1/testing/constraints-3.12.txt | 7 - .../v1/testing/constraints-3.7.txt | 10 - .../v1/testing/constraints-3.8.txt | 7 - .../v1/testing/constraints-3.9.txt | 7 - .../v1/tests/__init__.py | 16 - .../v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/datacatalog_v1/__init__.py | 16 - .../gapic/datacatalog_v1/test_data_catalog.py | 10354 ---------------- .../datacatalog_v1/test_policy_tag_manager.py | 5041 -------- .../test_policy_tag_manager_serialization.py | 2144 ---- .../v1beta1/.coveragerc | 13 
- .../google-cloud-datacatalog/v1beta1/.flake8 | 33 - .../v1beta1/MANIFEST.in | 2 - .../v1beta1/README.rst | 49 - .../v1beta1/docs/_static/custom.css | 3 - .../v1beta1/docs/conf.py | 376 - .../docs/datacatalog_v1beta1/data_catalog.rst | 10 - .../policy_tag_manager.rst | 10 - .../policy_tag_manager_serialization.rst | 6 - .../docs/datacatalog_v1beta1/services.rst | 8 - .../docs/datacatalog_v1beta1/types.rst | 6 - .../v1beta1/docs/index.rst | 7 - .../google/cloud/datacatalog/__init__.py | 183 - .../google/cloud/datacatalog/gapic_version.py | 16 - .../v1beta1/google/cloud/datacatalog/py.typed | 2 - .../cloud/datacatalog_v1beta1/__init__.py | 184 - .../datacatalog_v1beta1/gapic_metadata.json | 481 - .../datacatalog_v1beta1/gapic_version.py | 16 - .../google/cloud/datacatalog_v1beta1/py.typed | 2 - .../datacatalog_v1beta1/services/__init__.py | 15 - .../services/data_catalog/__init__.py | 22 - .../services/data_catalog/async_client.py | 3653 ------ .../services/data_catalog/client.py | 3904 ------ .../services/data_catalog/pagers.py | 504 - .../data_catalog/transports/__init__.py | 33 - .../services/data_catalog/transports/base.py | 531 - .../services/data_catalog/transports/grpc.py | 1122 -- .../data_catalog/transports/grpc_asyncio.py | 1121 -- .../services/policy_tag_manager/__init__.py | 22 - .../policy_tag_manager/async_client.py | 1582 --- .../services/policy_tag_manager/client.py | 1796 --- .../services/policy_tag_manager/pagers.py | 260 - .../policy_tag_manager/transports/__init__.py | 33 - .../policy_tag_manager/transports/base.py | 320 - .../policy_tag_manager/transports/grpc.py | 586 - .../transports/grpc_asyncio.py | 585 - .../__init__.py | 22 - .../async_client.py | 380 - .../client.py | 590 - .../transports/__init__.py | 33 - .../transports/base.py | 165 - .../transports/grpc.py | 303 - .../transports/grpc_asyncio.py | 302 - .../datacatalog_v1beta1/types/__init__.py | 184 - .../cloud/datacatalog_v1beta1/types/common.py | 66 - .../datacatalog_v1beta1/types/datacatalog.py | 1363 -- .../types/gcs_fileset_spec.py | 117 - .../types/policytagmanager.py | 520 - .../types/policytagmanagerserialization.py | 234 - .../cloud/datacatalog_v1beta1/types/schema.py | 93 - .../cloud/datacatalog_v1beta1/types/search.py | 114 - .../datacatalog_v1beta1/types/table_spec.py | 165 - .../cloud/datacatalog_v1beta1/types/tags.py | 407 - .../datacatalog_v1beta1/types/timestamps.py | 67 - .../cloud/datacatalog_v1beta1/types/usage.py | 104 - .../google-cloud-datacatalog/v1beta1/mypy.ini | 3 - .../v1beta1/noxfile.py | 184 - ...nerated_data_catalog_create_entry_async.py | 59 - ...d_data_catalog_create_entry_group_async.py | 53 - ...ed_data_catalog_create_entry_group_sync.py | 53 - ...enerated_data_catalog_create_entry_sync.py | 59 - ...generated_data_catalog_create_tag_async.py | 57 - ..._generated_data_catalog_create_tag_sync.py | 57 - ..._data_catalog_create_tag_template_async.py | 53 - ...catalog_create_tag_template_field_async.py | 57 - ..._catalog_create_tag_template_field_sync.py | 57 - ...d_data_catalog_create_tag_template_sync.py | 53 - ...nerated_data_catalog_delete_entry_async.py | 50 - ...d_data_catalog_delete_entry_group_async.py | 50 - ...ed_data_catalog_delete_entry_group_sync.py | 50 - ...enerated_data_catalog_delete_entry_sync.py | 50 - ...generated_data_catalog_delete_tag_async.py | 50 - ..._generated_data_catalog_delete_tag_sync.py | 50 - ..._data_catalog_delete_tag_template_async.py | 51 - ...catalog_delete_tag_template_field_async.py | 51 - ..._catalog_delete_tag_template_field_sync.py | 51 - 
...d_data_catalog_delete_tag_template_sync.py | 51 - ..._generated_data_catalog_get_entry_async.py | 52 - ...ated_data_catalog_get_entry_group_async.py | 52 - ...rated_data_catalog_get_entry_group_sync.py | 52 - ...1_generated_data_catalog_get_entry_sync.py | 52 - ...rated_data_catalog_get_iam_policy_async.py | 53 - ...erated_data_catalog_get_iam_policy_sync.py | 53 - ...ted_data_catalog_get_tag_template_async.py | 52 - ...ated_data_catalog_get_tag_template_sync.py | 52 - ...nerated_data_catalog_list_entries_async.py | 53 - ...enerated_data_catalog_list_entries_sync.py | 53 - ...ed_data_catalog_list_entry_groups_async.py | 53 - ...ted_data_catalog_list_entry_groups_sync.py | 53 - ..._generated_data_catalog_list_tags_async.py | 53 - ...1_generated_data_catalog_list_tags_sync.py | 53 - ...nerated_data_catalog_lookup_entry_async.py | 52 - ...enerated_data_catalog_lookup_entry_sync.py | 52 - ...catalog_rename_tag_template_field_async.py | 53 - ...ame_tag_template_field_enum_value_async.py | 53 - ...name_tag_template_field_enum_value_sync.py | 53 - ..._catalog_rename_tag_template_field_sync.py | 53 - ...rated_data_catalog_search_catalog_async.py | 52 - ...erated_data_catalog_search_catalog_sync.py | 52 - ...rated_data_catalog_set_iam_policy_async.py | 53 - ...erated_data_catalog_set_iam_policy_sync.py | 53 - ...data_catalog_test_iam_permissions_async.py | 54 - ..._data_catalog_test_iam_permissions_sync.py | 54 - ...nerated_data_catalog_update_entry_async.py | 57 - ...d_data_catalog_update_entry_group_async.py | 51 - ...ed_data_catalog_update_entry_group_sync.py | 51 - ...enerated_data_catalog_update_entry_sync.py | 57 - ...generated_data_catalog_update_tag_async.py | 56 - ..._generated_data_catalog_update_tag_sync.py | 56 - ..._data_catalog_update_tag_template_async.py | 51 - ...catalog_update_tag_template_field_async.py | 56 - ..._catalog_update_tag_template_field_sync.py | 56 - ...d_data_catalog_update_tag_template_sync.py | 51 - ...icy_tag_manager_create_policy_tag_async.py | 52 - ...licy_tag_manager_create_policy_tag_sync.py | 52 - ...olicy_tag_manager_create_taxonomy_async.py | 52 - ...policy_tag_manager_create_taxonomy_sync.py | 52 - ...icy_tag_manager_delete_policy_tag_async.py | 50 - ...licy_tag_manager_delete_policy_tag_sync.py | 50 - ...olicy_tag_manager_delete_taxonomy_async.py | 50 - ...policy_tag_manager_delete_taxonomy_sync.py | 50 - ...policy_tag_manager_get_iam_policy_async.py | 53 - ..._policy_tag_manager_get_iam_policy_sync.py | 53 - ...policy_tag_manager_get_policy_tag_async.py | 52 - ..._policy_tag_manager_get_policy_tag_sync.py | 52 - ...d_policy_tag_manager_get_taxonomy_async.py | 52 - ...ed_policy_tag_manager_get_taxonomy_sync.py | 52 - ...licy_tag_manager_list_policy_tags_async.py | 53 - ...olicy_tag_manager_list_policy_tags_sync.py | 53 - ...olicy_tag_manager_list_taxonomies_async.py | 53 - ...policy_tag_manager_list_taxonomies_sync.py | 53 - ...r_serialization_export_taxonomies_async.py | 54 - ...er_serialization_export_taxonomies_sync.py | 54 - ...r_serialization_import_taxonomies_async.py | 56 - ...er_serialization_import_taxonomies_sync.py | 56 - ...policy_tag_manager_set_iam_policy_async.py | 53 - ..._policy_tag_manager_set_iam_policy_sync.py | 53 - ..._tag_manager_test_iam_permissions_async.py | 54 - ...y_tag_manager_test_iam_permissions_sync.py | 54 - ...icy_tag_manager_update_policy_tag_async.py | 51 - ...licy_tag_manager_update_policy_tag_sync.py | 51 - ...olicy_tag_manager_update_taxonomy_async.py | 51 - ...policy_tag_manager_update_taxonomy_sync.py | 51 - 
...data_google.cloud.datacatalog.v1beta1.json | 7024 ----------- .../fixup_datacatalog_v1beta1_keywords.py | 215 - .../google-cloud-datacatalog/v1beta1/setup.py | 91 - .../v1beta1/testing/constraints-3.10.txt | 7 - .../v1beta1/testing/constraints-3.11.txt | 7 - .../v1beta1/testing/constraints-3.12.txt | 7 - .../v1beta1/testing/constraints-3.7.txt | 10 - .../v1beta1/testing/constraints-3.8.txt | 7 - .../v1beta1/testing/constraints-3.9.txt | 7 - .../v1beta1/tests/__init__.py | 16 - .../v1beta1/tests/unit/__init__.py | 16 - .../v1beta1/tests/unit/gapic/__init__.py | 16 - .../gapic/datacatalog_v1beta1/__init__.py | 16 - .../datacatalog_v1beta1/test_data_catalog.py | 8709 ------------- .../test_policy_tag_manager.py | 4521 ------- .../test_policy_tag_manager_serialization.py | 1456 --- .../google-cloud-datacatalog/CONTRIBUTING.rst | 4 +- .../google/cloud/datacatalog/__init__.py | 10 + .../google/cloud/datacatalog/gapic_version.py | 2 +- .../google/cloud/datacatalog_v1/__init__.py | 10 + .../cloud/datacatalog_v1/gapic_version.py | 2 +- .../services/data_catalog/async_client.py | 4 +- .../services/data_catalog/client.py | 4 +- .../cloud/datacatalog_v1/types/__init__.py | 10 + .../cloud/datacatalog_v1/types/common.py | 3 + .../cloud/datacatalog_v1/types/datacatalog.py | 239 + .../datacatalog_v1beta1/gapic_version.py | 2 +- .../cloud/datacatalog_v1beta1/types/usage.py | 2 +- packages/google-cloud-datacatalog/noxfile.py | 23 +- ...nerated_data_catalog_create_entry_async.py | 2 +- ...enerated_data_catalog_create_entry_sync.py | 2 +- ...nerated_data_catalog_update_entry_async.py | 2 +- ...enerated_data_catalog_update_entry_sync.py | 2 +- ..._metadata_google.cloud.datacatalog.v1.json | 2 +- ...data_google.cloud.datacatalog.v1beta1.json | 2 +- 356 files changed, 289 insertions(+), 110892 deletions(-) delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/.flake8 delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/README.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py delete mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py delete mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py delete mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py delete mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py delete mode 100644 
owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py 
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json delete 
mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/setup.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py
delete mode 100644 owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc b/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc
deleted file
mode 100644 index 8d9d83e17533..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/datacatalog/__init__.py - google/cloud/datacatalog/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 b/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in b/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in deleted file mode 100644 index 7344043d20a5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/datacatalog *.py -recursive-include google/cloud/datacatalog_v1 *.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/README.rst b/owl-bot-staging/google-cloud-datacatalog/v1/README.rst deleted file mode 100644 index 8f53b24416fc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Datacatalog API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Datacatalog API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py b/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py deleted file mode 100644 index aec9c23130e4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-datacatalog documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. 
-project = u"google-cloud-datacatalog" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-datacatalog-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - ( - root_doc, - "google-cloud-datacatalog.tex", - u"google-cloud-datacatalog Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-datacatalog", - u"Google Cloud Datacatalog Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-datacatalog", - u"google-cloud-datacatalog Documentation", - author, - "google-cloud-datacatalog", - "GAPIC library for Google Cloud Datacatalog API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst deleted file mode 100644 index 6141a1c7bc43..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/data_catalog.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataCatalog ------------------------------ - -.. 
automodule:: google.cloud.datacatalog_v1.services.data_catalog - :members: - :inherited-members: - -.. automodule:: google.cloud.datacatalog_v1.services.data_catalog.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst deleted file mode 100644 index 03d2846ff2e8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager.rst +++ /dev/null @@ -1,10 +0,0 @@ -PolicyTagManager ----------------------------------- - -.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager - :members: - :inherited-members: - -.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst deleted file mode 100644 index f7006d1b0825..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/policy_tag_manager_serialization.rst +++ /dev/null @@ -1,6 +0,0 @@ -PolicyTagManagerSerialization ------------------------------------------------ - -.. automodule:: google.cloud.datacatalog_v1.services.policy_tag_manager_serialization - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst deleted file mode 100644 index a70d3132fd05..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/services.rst +++ /dev/null @@ -1,8 +0,0 @@ -Services for Google Cloud Datacatalog v1 API -============================================ -.. toctree:: - :maxdepth: 2 - - data_catalog - policy_tag_manager - policy_tag_manager_serialization diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst deleted file mode 100644 index 19f12ef87fd9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/datacatalog_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Datacatalog v1 API -========================================= - -.. automodule:: google.cloud.datacatalog_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst b/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst deleted file mode 100644 index 7af5288574f0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - datacatalog_v1/services - datacatalog_v1/types diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py deleted file mode 100644 index e667ef3fd25e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/__init__.py +++ /dev/null @@ -1,273 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.datacatalog import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.datacatalog_v1.services.data_catalog.client import DataCatalogClient -from google.cloud.datacatalog_v1.services.data_catalog.async_client import DataCatalogAsyncClient -from google.cloud.datacatalog_v1.services.policy_tag_manager.client import PolicyTagManagerClient -from google.cloud.datacatalog_v1.services.policy_tag_manager.async_client import PolicyTagManagerAsyncClient -from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.client import PolicyTagManagerSerializationClient -from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.async_client import PolicyTagManagerSerializationAsyncClient - -from google.cloud.datacatalog_v1.types.bigquery import BigQueryConnectionSpec -from google.cloud.datacatalog_v1.types.bigquery import BigQueryRoutineSpec -from google.cloud.datacatalog_v1.types.bigquery import CloudSqlBigQueryConnectionSpec -from google.cloud.datacatalog_v1.types.common import PersonalDetails -from google.cloud.datacatalog_v1.types.common import IntegratedSystem -from google.cloud.datacatalog_v1.types.common import ManagingSystem -from google.cloud.datacatalog_v1.types.data_source import DataSource -from google.cloud.datacatalog_v1.types.data_source import StorageProperties -from google.cloud.datacatalog_v1.types.datacatalog import BusinessContext -from google.cloud.datacatalog_v1.types.datacatalog import CloudBigtableInstanceSpec -from google.cloud.datacatalog_v1.types.datacatalog import CloudBigtableSystemSpec -from google.cloud.datacatalog_v1.types.datacatalog import Contacts -from google.cloud.datacatalog_v1.types.datacatalog import CreateEntryGroupRequest -from google.cloud.datacatalog_v1.types.datacatalog import CreateEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import CreateTagRequest -from google.cloud.datacatalog_v1.types.datacatalog import CreateTagTemplateFieldRequest -from google.cloud.datacatalog_v1.types.datacatalog import CreateTagTemplateRequest -from google.cloud.datacatalog_v1.types.datacatalog import DatabaseTableSpec -from google.cloud.datacatalog_v1.types.datacatalog import DatasetSpec -from google.cloud.datacatalog_v1.types.datacatalog import DataSourceConnectionSpec -from google.cloud.datacatalog_v1.types.datacatalog import DeleteEntryGroupRequest -from google.cloud.datacatalog_v1.types.datacatalog import DeleteEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagRequest -from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagTemplateFieldRequest -from google.cloud.datacatalog_v1.types.datacatalog import DeleteTagTemplateRequest -from google.cloud.datacatalog_v1.types.datacatalog import Entry -from google.cloud.datacatalog_v1.types.datacatalog import EntryGroup -from google.cloud.datacatalog_v1.types.datacatalog import EntryOverview -from google.cloud.datacatalog_v1.types.datacatalog import FilesetSpec -from google.cloud.datacatalog_v1.types.datacatalog import GetEntryGroupRequest -from 
google.cloud.datacatalog_v1.types.datacatalog import GetEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import GetTagTemplateRequest -from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesMetadata -from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesRequest -from google.cloud.datacatalog_v1.types.datacatalog import ImportEntriesResponse -from google.cloud.datacatalog_v1.types.datacatalog import ListEntriesRequest -from google.cloud.datacatalog_v1.types.datacatalog import ListEntriesResponse -from google.cloud.datacatalog_v1.types.datacatalog import ListEntryGroupsRequest -from google.cloud.datacatalog_v1.types.datacatalog import ListEntryGroupsResponse -from google.cloud.datacatalog_v1.types.datacatalog import ListTagsRequest -from google.cloud.datacatalog_v1.types.datacatalog import ListTagsResponse -from google.cloud.datacatalog_v1.types.datacatalog import LookerSystemSpec -from google.cloud.datacatalog_v1.types.datacatalog import LookupEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import ModelSpec -from google.cloud.datacatalog_v1.types.datacatalog import ModifyEntryContactsRequest -from google.cloud.datacatalog_v1.types.datacatalog import ModifyEntryOverviewRequest -from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsMetadata -from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsRequest -from google.cloud.datacatalog_v1.types.datacatalog import ReconcileTagsResponse -from google.cloud.datacatalog_v1.types.datacatalog import RenameTagTemplateFieldEnumValueRequest -from google.cloud.datacatalog_v1.types.datacatalog import RenameTagTemplateFieldRequest -from google.cloud.datacatalog_v1.types.datacatalog import RoutineSpec -from google.cloud.datacatalog_v1.types.datacatalog import SearchCatalogRequest -from google.cloud.datacatalog_v1.types.datacatalog import SearchCatalogResponse -from google.cloud.datacatalog_v1.types.datacatalog import ServiceSpec -from google.cloud.datacatalog_v1.types.datacatalog import SqlDatabaseSystemSpec -from google.cloud.datacatalog_v1.types.datacatalog import StarEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import StarEntryResponse -from google.cloud.datacatalog_v1.types.datacatalog import UnstarEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import UnstarEntryResponse -from google.cloud.datacatalog_v1.types.datacatalog import UpdateEntryGroupRequest -from google.cloud.datacatalog_v1.types.datacatalog import UpdateEntryRequest -from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagRequest -from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagTemplateFieldRequest -from google.cloud.datacatalog_v1.types.datacatalog import UpdateTagTemplateRequest -from google.cloud.datacatalog_v1.types.datacatalog import VertexDatasetSpec -from google.cloud.datacatalog_v1.types.datacatalog import VertexModelSourceInfo -from google.cloud.datacatalog_v1.types.datacatalog import VertexModelSpec -from google.cloud.datacatalog_v1.types.datacatalog import EntryType -from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexExternalTable -from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexFilesetSpec -from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexSpec -from google.cloud.datacatalog_v1.types.dataplex_spec import DataplexTableSpec -from google.cloud.datacatalog_v1.types.dump_content import DumpItem -from google.cloud.datacatalog_v1.types.dump_content import TaggedEntry 
-from google.cloud.datacatalog_v1.types.gcs_fileset_spec import GcsFilesetSpec -from google.cloud.datacatalog_v1.types.gcs_fileset_spec import GcsFileSpec -from google.cloud.datacatalog_v1.types.physical_schema import PhysicalSchema -from google.cloud.datacatalog_v1.types.policytagmanager import CreatePolicyTagRequest -from google.cloud.datacatalog_v1.types.policytagmanager import CreateTaxonomyRequest -from google.cloud.datacatalog_v1.types.policytagmanager import DeletePolicyTagRequest -from google.cloud.datacatalog_v1.types.policytagmanager import DeleteTaxonomyRequest -from google.cloud.datacatalog_v1.types.policytagmanager import GetPolicyTagRequest -from google.cloud.datacatalog_v1.types.policytagmanager import GetTaxonomyRequest -from google.cloud.datacatalog_v1.types.policytagmanager import ListPolicyTagsRequest -from google.cloud.datacatalog_v1.types.policytagmanager import ListPolicyTagsResponse -from google.cloud.datacatalog_v1.types.policytagmanager import ListTaxonomiesRequest -from google.cloud.datacatalog_v1.types.policytagmanager import ListTaxonomiesResponse -from google.cloud.datacatalog_v1.types.policytagmanager import PolicyTag -from google.cloud.datacatalog_v1.types.policytagmanager import Taxonomy -from google.cloud.datacatalog_v1.types.policytagmanager import UpdatePolicyTagRequest -from google.cloud.datacatalog_v1.types.policytagmanager import UpdateTaxonomyRequest -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import CrossRegionalSource -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ExportTaxonomiesRequest -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ExportTaxonomiesResponse -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ImportTaxonomiesRequest -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ImportTaxonomiesResponse -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import InlineSource -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import ReplaceTaxonomyRequest -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import SerializedPolicyTag -from google.cloud.datacatalog_v1.types.policytagmanagerserialization import SerializedTaxonomy -from google.cloud.datacatalog_v1.types.schema import ColumnSchema -from google.cloud.datacatalog_v1.types.schema import Schema -from google.cloud.datacatalog_v1.types.search import SearchCatalogResult -from google.cloud.datacatalog_v1.types.search import SearchResultType -from google.cloud.datacatalog_v1.types.table_spec import BigQueryDateShardedSpec -from google.cloud.datacatalog_v1.types.table_spec import BigQueryTableSpec -from google.cloud.datacatalog_v1.types.table_spec import TableSpec -from google.cloud.datacatalog_v1.types.table_spec import ViewSpec -from google.cloud.datacatalog_v1.types.table_spec import TableSourceType -from google.cloud.datacatalog_v1.types.tags import FieldType -from google.cloud.datacatalog_v1.types.tags import Tag -from google.cloud.datacatalog_v1.types.tags import TagField -from google.cloud.datacatalog_v1.types.tags import TagTemplate -from google.cloud.datacatalog_v1.types.tags import TagTemplateField -from google.cloud.datacatalog_v1.types.timestamps import SystemTimestamps -from google.cloud.datacatalog_v1.types.usage import CommonUsageStats -from google.cloud.datacatalog_v1.types.usage import UsageSignal -from google.cloud.datacatalog_v1.types.usage import UsageStats - -__all__ = 
('DataCatalogClient', - 'DataCatalogAsyncClient', - 'PolicyTagManagerClient', - 'PolicyTagManagerAsyncClient', - 'PolicyTagManagerSerializationClient', - 'PolicyTagManagerSerializationAsyncClient', - 'BigQueryConnectionSpec', - 'BigQueryRoutineSpec', - 'CloudSqlBigQueryConnectionSpec', - 'PersonalDetails', - 'IntegratedSystem', - 'ManagingSystem', - 'DataSource', - 'StorageProperties', - 'BusinessContext', - 'CloudBigtableInstanceSpec', - 'CloudBigtableSystemSpec', - 'Contacts', - 'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateTagRequest', - 'CreateTagTemplateFieldRequest', - 'CreateTagTemplateRequest', - 'DatabaseTableSpec', - 'DatasetSpec', - 'DataSourceConnectionSpec', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteTagRequest', - 'DeleteTagTemplateFieldRequest', - 'DeleteTagTemplateRequest', - 'Entry', - 'EntryGroup', - 'EntryOverview', - 'FilesetSpec', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetTagTemplateRequest', - 'ImportEntriesMetadata', - 'ImportEntriesRequest', - 'ImportEntriesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListTagsRequest', - 'ListTagsResponse', - 'LookerSystemSpec', - 'LookupEntryRequest', - 'ModelSpec', - 'ModifyEntryContactsRequest', - 'ModifyEntryOverviewRequest', - 'ReconcileTagsMetadata', - 'ReconcileTagsRequest', - 'ReconcileTagsResponse', - 'RenameTagTemplateFieldEnumValueRequest', - 'RenameTagTemplateFieldRequest', - 'RoutineSpec', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'ServiceSpec', - 'SqlDatabaseSystemSpec', - 'StarEntryRequest', - 'StarEntryResponse', - 'UnstarEntryRequest', - 'UnstarEntryResponse', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateTagRequest', - 'UpdateTagTemplateFieldRequest', - 'UpdateTagTemplateRequest', - 'VertexDatasetSpec', - 'VertexModelSourceInfo', - 'VertexModelSpec', - 'EntryType', - 'DataplexExternalTable', - 'DataplexFilesetSpec', - 'DataplexSpec', - 'DataplexTableSpec', - 'DumpItem', - 'TaggedEntry', - 'GcsFilesetSpec', - 'GcsFileSpec', - 'PhysicalSchema', - 'CreatePolicyTagRequest', - 'CreateTaxonomyRequest', - 'DeletePolicyTagRequest', - 'DeleteTaxonomyRequest', - 'GetPolicyTagRequest', - 'GetTaxonomyRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'PolicyTag', - 'Taxonomy', - 'UpdatePolicyTagRequest', - 'UpdateTaxonomyRequest', - 'CrossRegionalSource', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - 'ImportTaxonomiesRequest', - 'ImportTaxonomiesResponse', - 'InlineSource', - 'ReplaceTaxonomyRequest', - 'SerializedPolicyTag', - 'SerializedTaxonomy', - 'ColumnSchema', - 'Schema', - 'SearchCatalogResult', - 'SearchResultType', - 'BigQueryDateShardedSpec', - 'BigQueryTableSpec', - 'TableSpec', - 'ViewSpec', - 'TableSourceType', - 'FieldType', - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'SystemTimestamps', - 'CommonUsageStats', - 'UsageSignal', - 'UsageStats', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py deleted file mode 100644 index 360a0d13ebdd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except 
in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed deleted file mode 100644 index bb4088a3c198..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datacatalog package uses inline types. diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py deleted file mode 100644 index a8482519bf0d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/__init__.py +++ /dev/null @@ -1,274 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.datacatalog_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_catalog import DataCatalogClient -from .services.data_catalog import DataCatalogAsyncClient -from .services.policy_tag_manager import PolicyTagManagerClient -from .services.policy_tag_manager import PolicyTagManagerAsyncClient -from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient -from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient - -from .types.bigquery import BigQueryConnectionSpec -from .types.bigquery import BigQueryRoutineSpec -from .types.bigquery import CloudSqlBigQueryConnectionSpec -from .types.common import PersonalDetails -from .types.common import IntegratedSystem -from .types.common import ManagingSystem -from .types.data_source import DataSource -from .types.data_source import StorageProperties -from .types.datacatalog import BusinessContext -from .types.datacatalog import CloudBigtableInstanceSpec -from .types.datacatalog import CloudBigtableSystemSpec -from .types.datacatalog import Contacts -from .types.datacatalog import CreateEntryGroupRequest -from .types.datacatalog import CreateEntryRequest -from .types.datacatalog import CreateTagRequest -from .types.datacatalog import CreateTagTemplateFieldRequest -from .types.datacatalog import CreateTagTemplateRequest -from .types.datacatalog import DatabaseTableSpec -from .types.datacatalog import DatasetSpec -from .types.datacatalog import DataSourceConnectionSpec -from .types.datacatalog import DeleteEntryGroupRequest -from .types.datacatalog import DeleteEntryRequest -from .types.datacatalog import DeleteTagRequest -from .types.datacatalog import DeleteTagTemplateFieldRequest -from .types.datacatalog import DeleteTagTemplateRequest -from .types.datacatalog import Entry -from .types.datacatalog import EntryGroup -from .types.datacatalog import EntryOverview -from .types.datacatalog import FilesetSpec -from .types.datacatalog import GetEntryGroupRequest -from .types.datacatalog import GetEntryRequest -from .types.datacatalog import GetTagTemplateRequest -from .types.datacatalog import ImportEntriesMetadata -from .types.datacatalog import ImportEntriesRequest -from .types.datacatalog import ImportEntriesResponse -from .types.datacatalog import ListEntriesRequest -from .types.datacatalog import ListEntriesResponse -from .types.datacatalog import ListEntryGroupsRequest -from .types.datacatalog import ListEntryGroupsResponse -from .types.datacatalog import ListTagsRequest -from .types.datacatalog import ListTagsResponse -from .types.datacatalog import LookerSystemSpec -from .types.datacatalog import LookupEntryRequest -from .types.datacatalog import ModelSpec -from .types.datacatalog import ModifyEntryContactsRequest -from .types.datacatalog import ModifyEntryOverviewRequest -from .types.datacatalog import ReconcileTagsMetadata -from .types.datacatalog import ReconcileTagsRequest -from .types.datacatalog import ReconcileTagsResponse -from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest -from .types.datacatalog import RenameTagTemplateFieldRequest -from .types.datacatalog import RoutineSpec -from .types.datacatalog import SearchCatalogRequest -from .types.datacatalog import SearchCatalogResponse -from .types.datacatalog import ServiceSpec -from .types.datacatalog import SqlDatabaseSystemSpec -from .types.datacatalog import StarEntryRequest -from .types.datacatalog import StarEntryResponse 
-from .types.datacatalog import UnstarEntryRequest -from .types.datacatalog import UnstarEntryResponse -from .types.datacatalog import UpdateEntryGroupRequest -from .types.datacatalog import UpdateEntryRequest -from .types.datacatalog import UpdateTagRequest -from .types.datacatalog import UpdateTagTemplateFieldRequest -from .types.datacatalog import UpdateTagTemplateRequest -from .types.datacatalog import VertexDatasetSpec -from .types.datacatalog import VertexModelSourceInfo -from .types.datacatalog import VertexModelSpec -from .types.datacatalog import EntryType -from .types.dataplex_spec import DataplexExternalTable -from .types.dataplex_spec import DataplexFilesetSpec -from .types.dataplex_spec import DataplexSpec -from .types.dataplex_spec import DataplexTableSpec -from .types.dump_content import DumpItem -from .types.dump_content import TaggedEntry -from .types.gcs_fileset_spec import GcsFilesetSpec -from .types.gcs_fileset_spec import GcsFileSpec -from .types.physical_schema import PhysicalSchema -from .types.policytagmanager import CreatePolicyTagRequest -from .types.policytagmanager import CreateTaxonomyRequest -from .types.policytagmanager import DeletePolicyTagRequest -from .types.policytagmanager import DeleteTaxonomyRequest -from .types.policytagmanager import GetPolicyTagRequest -from .types.policytagmanager import GetTaxonomyRequest -from .types.policytagmanager import ListPolicyTagsRequest -from .types.policytagmanager import ListPolicyTagsResponse -from .types.policytagmanager import ListTaxonomiesRequest -from .types.policytagmanager import ListTaxonomiesResponse -from .types.policytagmanager import PolicyTag -from .types.policytagmanager import Taxonomy -from .types.policytagmanager import UpdatePolicyTagRequest -from .types.policytagmanager import UpdateTaxonomyRequest -from .types.policytagmanagerserialization import CrossRegionalSource -from .types.policytagmanagerserialization import ExportTaxonomiesRequest -from .types.policytagmanagerserialization import ExportTaxonomiesResponse -from .types.policytagmanagerserialization import ImportTaxonomiesRequest -from .types.policytagmanagerserialization import ImportTaxonomiesResponse -from .types.policytagmanagerserialization import InlineSource -from .types.policytagmanagerserialization import ReplaceTaxonomyRequest -from .types.policytagmanagerserialization import SerializedPolicyTag -from .types.policytagmanagerserialization import SerializedTaxonomy -from .types.schema import ColumnSchema -from .types.schema import Schema -from .types.search import SearchCatalogResult -from .types.search import SearchResultType -from .types.table_spec import BigQueryDateShardedSpec -from .types.table_spec import BigQueryTableSpec -from .types.table_spec import TableSpec -from .types.table_spec import ViewSpec -from .types.table_spec import TableSourceType -from .types.tags import FieldType -from .types.tags import Tag -from .types.tags import TagField -from .types.tags import TagTemplate -from .types.tags import TagTemplateField -from .types.timestamps import SystemTimestamps -from .types.usage import CommonUsageStats -from .types.usage import UsageSignal -from .types.usage import UsageStats - -__all__ = ( - 'DataCatalogAsyncClient', - 'PolicyTagManagerAsyncClient', - 'PolicyTagManagerSerializationAsyncClient', -'BigQueryConnectionSpec', -'BigQueryDateShardedSpec', -'BigQueryRoutineSpec', -'BigQueryTableSpec', -'BusinessContext', -'CloudBigtableInstanceSpec', -'CloudBigtableSystemSpec', -'CloudSqlBigQueryConnectionSpec', 
-'ColumnSchema', -'CommonUsageStats', -'Contacts', -'CreateEntryGroupRequest', -'CreateEntryRequest', -'CreatePolicyTagRequest', -'CreateTagRequest', -'CreateTagTemplateFieldRequest', -'CreateTagTemplateRequest', -'CreateTaxonomyRequest', -'CrossRegionalSource', -'DataCatalogClient', -'DataSource', -'DataSourceConnectionSpec', -'DatabaseTableSpec', -'DataplexExternalTable', -'DataplexFilesetSpec', -'DataplexSpec', -'DataplexTableSpec', -'DatasetSpec', -'DeleteEntryGroupRequest', -'DeleteEntryRequest', -'DeletePolicyTagRequest', -'DeleteTagRequest', -'DeleteTagTemplateFieldRequest', -'DeleteTagTemplateRequest', -'DeleteTaxonomyRequest', -'DumpItem', -'Entry', -'EntryGroup', -'EntryOverview', -'EntryType', -'ExportTaxonomiesRequest', -'ExportTaxonomiesResponse', -'FieldType', -'FilesetSpec', -'GcsFileSpec', -'GcsFilesetSpec', -'GetEntryGroupRequest', -'GetEntryRequest', -'GetPolicyTagRequest', -'GetTagTemplateRequest', -'GetTaxonomyRequest', -'ImportEntriesMetadata', -'ImportEntriesRequest', -'ImportEntriesResponse', -'ImportTaxonomiesRequest', -'ImportTaxonomiesResponse', -'InlineSource', -'IntegratedSystem', -'ListEntriesRequest', -'ListEntriesResponse', -'ListEntryGroupsRequest', -'ListEntryGroupsResponse', -'ListPolicyTagsRequest', -'ListPolicyTagsResponse', -'ListTagsRequest', -'ListTagsResponse', -'ListTaxonomiesRequest', -'ListTaxonomiesResponse', -'LookerSystemSpec', -'LookupEntryRequest', -'ManagingSystem', -'ModelSpec', -'ModifyEntryContactsRequest', -'ModifyEntryOverviewRequest', -'PersonalDetails', -'PhysicalSchema', -'PolicyTag', -'PolicyTagManagerClient', -'PolicyTagManagerSerializationClient', -'ReconcileTagsMetadata', -'ReconcileTagsRequest', -'ReconcileTagsResponse', -'RenameTagTemplateFieldEnumValueRequest', -'RenameTagTemplateFieldRequest', -'ReplaceTaxonomyRequest', -'RoutineSpec', -'Schema', -'SearchCatalogRequest', -'SearchCatalogResponse', -'SearchCatalogResult', -'SearchResultType', -'SerializedPolicyTag', -'SerializedTaxonomy', -'ServiceSpec', -'SqlDatabaseSystemSpec', -'StarEntryRequest', -'StarEntryResponse', -'StorageProperties', -'SystemTimestamps', -'TableSourceType', -'TableSpec', -'Tag', -'TagField', -'TagTemplate', -'TagTemplateField', -'TaggedEntry', -'Taxonomy', -'UnstarEntryRequest', -'UnstarEntryResponse', -'UpdateEntryGroupRequest', -'UpdateEntryRequest', -'UpdatePolicyTagRequest', -'UpdateTagRequest', -'UpdateTagTemplateFieldRequest', -'UpdateTagTemplateRequest', -'UpdateTaxonomyRequest', -'UsageSignal', -'UsageStats', -'VertexDatasetSpec', -'VertexModelSourceInfo', -'VertexModelSpec', -'ViewSpec', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json deleted file mode 100644 index 447d15595848..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_metadata.json +++ /dev/null @@ -1,551 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.datacatalog_v1", - "protoPackage": "google.cloud.datacatalog.v1", - "schema": "1.0", - "services": { - "DataCatalog": { - "clients": { - "grpc": { - "libraryClient": "DataCatalogClient", - "rpcs": { - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateTag": { - "methods": [ - "create_tag" - ] - }, - "CreateTagTemplate": { - "methods": [ 
- "create_tag_template" - ] - }, - "CreateTagTemplateField": { - "methods": [ - "create_tag_template_field" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteTag": { - "methods": [ - "delete_tag" - ] - }, - "DeleteTagTemplate": { - "methods": [ - "delete_tag_template" - ] - }, - "DeleteTagTemplateField": { - "methods": [ - "delete_tag_template_field" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetTagTemplate": { - "methods": [ - "get_tag_template" - ] - }, - "ImportEntries": { - "methods": [ - "import_entries" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListTags": { - "methods": [ - "list_tags" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "ModifyEntryContacts": { - "methods": [ - "modify_entry_contacts" - ] - }, - "ModifyEntryOverview": { - "methods": [ - "modify_entry_overview" - ] - }, - "ReconcileTags": { - "methods": [ - "reconcile_tags" - ] - }, - "RenameTagTemplateField": { - "methods": [ - "rename_tag_template_field" - ] - }, - "RenameTagTemplateFieldEnumValue": { - "methods": [ - "rename_tag_template_field_enum_value" - ] - }, - "SearchCatalog": { - "methods": [ - "search_catalog" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "StarEntry": { - "methods": [ - "star_entry" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UnstarEntry": { - "methods": [ - "unstar_entry" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateTag": { - "methods": [ - "update_tag" - ] - }, - "UpdateTagTemplate": { - "methods": [ - "update_tag_template" - ] - }, - "UpdateTagTemplateField": { - "methods": [ - "update_tag_template_field" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataCatalogAsyncClient", - "rpcs": { - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateTag": { - "methods": [ - "create_tag" - ] - }, - "CreateTagTemplate": { - "methods": [ - "create_tag_template" - ] - }, - "CreateTagTemplateField": { - "methods": [ - "create_tag_template_field" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteTag": { - "methods": [ - "delete_tag" - ] - }, - "DeleteTagTemplate": { - "methods": [ - "delete_tag_template" - ] - }, - "DeleteTagTemplateField": { - "methods": [ - "delete_tag_template_field" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetTagTemplate": { - "methods": [ - "get_tag_template" - ] - }, - "ImportEntries": { - "methods": [ - "import_entries" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListTags": { - "methods": [ - "list_tags" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "ModifyEntryContacts": { - "methods": [ - "modify_entry_contacts" - ] - }, - "ModifyEntryOverview": { - "methods": [ - 
"modify_entry_overview" - ] - }, - "ReconcileTags": { - "methods": [ - "reconcile_tags" - ] - }, - "RenameTagTemplateField": { - "methods": [ - "rename_tag_template_field" - ] - }, - "RenameTagTemplateFieldEnumValue": { - "methods": [ - "rename_tag_template_field_enum_value" - ] - }, - "SearchCatalog": { - "methods": [ - "search_catalog" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "StarEntry": { - "methods": [ - "star_entry" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UnstarEntry": { - "methods": [ - "unstar_entry" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateTag": { - "methods": [ - "update_tag" - ] - }, - "UpdateTagTemplate": { - "methods": [ - "update_tag_template" - ] - }, - "UpdateTagTemplateField": { - "methods": [ - "update_tag_template_field" - ] - } - } - } - } - }, - "PolicyTagManager": { - "clients": { - "grpc": { - "libraryClient": "PolicyTagManagerClient", - "rpcs": { - "CreatePolicyTag": { - "methods": [ - "create_policy_tag" - ] - }, - "CreateTaxonomy": { - "methods": [ - "create_taxonomy" - ] - }, - "DeletePolicyTag": { - "methods": [ - "delete_policy_tag" - ] - }, - "DeleteTaxonomy": { - "methods": [ - "delete_taxonomy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetPolicyTag": { - "methods": [ - "get_policy_tag" - ] - }, - "GetTaxonomy": { - "methods": [ - "get_taxonomy" - ] - }, - "ListPolicyTags": { - "methods": [ - "list_policy_tags" - ] - }, - "ListTaxonomies": { - "methods": [ - "list_taxonomies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdatePolicyTag": { - "methods": [ - "update_policy_tag" - ] - }, - "UpdateTaxonomy": { - "methods": [ - "update_taxonomy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PolicyTagManagerAsyncClient", - "rpcs": { - "CreatePolicyTag": { - "methods": [ - "create_policy_tag" - ] - }, - "CreateTaxonomy": { - "methods": [ - "create_taxonomy" - ] - }, - "DeletePolicyTag": { - "methods": [ - "delete_policy_tag" - ] - }, - "DeleteTaxonomy": { - "methods": [ - "delete_taxonomy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetPolicyTag": { - "methods": [ - "get_policy_tag" - ] - }, - "GetTaxonomy": { - "methods": [ - "get_taxonomy" - ] - }, - "ListPolicyTags": { - "methods": [ - "list_policy_tags" - ] - }, - "ListTaxonomies": { - "methods": [ - "list_taxonomies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdatePolicyTag": { - "methods": [ - "update_policy_tag" - ] - }, - "UpdateTaxonomy": { - "methods": [ - "update_taxonomy" - ] - } - } - } - } - }, - "PolicyTagManagerSerialization": { - "clients": { - "grpc": { - "libraryClient": "PolicyTagManagerSerializationClient", - "rpcs": { - "ExportTaxonomies": { - "methods": [ - "export_taxonomies" - ] - }, - "ImportTaxonomies": { - "methods": [ - "import_taxonomies" - ] - }, - "ReplaceTaxonomy": { - "methods": [ - "replace_taxonomy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PolicyTagManagerSerializationAsyncClient", - "rpcs": { - "ExportTaxonomies": { - "methods": [ - "export_taxonomies" - ] - }, - "ImportTaxonomies": { - "methods": [ - "import_taxonomies" - ] - }, - "ReplaceTaxonomy": { - "methods": [ - "replace_taxonomy" - ] - } - } - 
} - } - } - } -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py deleted file mode 100644 index 360a0d13ebdd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed deleted file mode 100644 index bb4088a3c198..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datacatalog package uses inline types. diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py deleted file mode 100644 index 89a37dc92c5a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py deleted file mode 100644 index e703e914bb2c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
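The gapic_metadata.json removed above maps each proto RPC to the client method that implements it, keyed by service and transport. A minimal sketch of reading such a mapping, using an inline fragment in the same schema (the fragment and its contents are illustrative, not the full file):

.. code-block:: python

    import json

    # Fragment in the same schema as the gapic_metadata.json shown above.
    metadata = json.loads("""
    {
      "libraryPackage": "google.cloud.datacatalog_v1",
      "services": {
        "DataCatalog": {
          "clients": {
            "grpc": {
              "libraryClient": "DataCatalogClient",
              "rpcs": {
                "SearchCatalog": {"methods": ["search_catalog"]},
                "CreateEntry": {"methods": ["create_entry"]}
              }
            }
          }
        }
      }
    }
    """)

    # Print which Python method handles each RPC on the synchronous gRPC client.
    rpcs = metadata["services"]["DataCatalog"]["clients"]["grpc"]["rpcs"]
    for rpc, spec in rpcs.items():
        print(rpc, "->", ", ".join(spec["methods"]))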
-# -from .client import DataCatalogClient -from .async_client import DataCatalogAsyncClient - -__all__ = ( - 'DataCatalogClient', - 'DataCatalogAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py deleted file mode 100644 index 1f4c84524fc9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ /dev/null @@ -1,4510 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.datacatalog_v1.services.data_catalog import pagers -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import data_source -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1.types import schema -from google.cloud.datacatalog_v1.types import search -from google.cloud.datacatalog_v1.types import table_spec -from google.cloud.datacatalog_v1.types import tags -from google.cloud.datacatalog_v1.types import timestamps -from google.cloud.datacatalog_v1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport -from .client import DataCatalogClient - - -class DataCatalogAsyncClient: - """Data Catalog API service allows you to discover, understand, - and manage your data. 
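The deleted data_catalog service package re-exports the synchronous and asynchronous clients listed above. A minimal sketch of constructing both, assuming Application Default Credentials are available in the environment:

.. code-block:: python

    import asyncio

    from google.cloud import datacatalog_v1

    # Synchronous client; credentials are resolved from the environment
    # (e.g. Application Default Credentials) unless passed in explicitly.
    sync_client = datacatalog_v1.DataCatalogClient()

    async def main():
        # Asynchronous variant of the same surface, backed by the
        # grpc_asyncio transport by default.
        async_client = datacatalog_v1.DataCatalogAsyncClient()
        print(type(async_client.transport).__name__)

    asyncio.run(main())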
- """ - - _client: DataCatalogClient - - DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT - - entry_path = staticmethod(DataCatalogClient.entry_path) - parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) - parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) - tag_path = staticmethod(DataCatalogClient.tag_path) - parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) - parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) - parse_tag_template_field_path = staticmethod(DataCatalogClient.parse_tag_template_field_path) - tag_template_field_enum_value_path = staticmethod(DataCatalogClient.tag_template_field_enum_value_path) - parse_tag_template_field_enum_value_path = staticmethod(DataCatalogClient.parse_tag_template_field_enum_value_path) - common_billing_account_path = staticmethod(DataCatalogClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataCatalogClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataCatalogClient.common_folder_path) - parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - common_organization_path = staticmethod(DataCatalogClient.common_organization_path) - parse_common_organization_path = staticmethod(DataCatalogClient.parse_common_organization_path) - common_project_path = staticmethod(DataCatalogClient.common_project_path) - parse_common_project_path = staticmethod(DataCatalogClient.parse_common_project_path) - common_location_path = staticmethod(DataCatalogClient.common_location_path) - parse_common_location_path = staticmethod(DataCatalogClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataCatalogAsyncClient: The constructed client. - """ - return DataCatalogClient.from_service_account_info.__func__(DataCatalogAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataCatalogAsyncClient: The constructed client. - """ - return DataCatalogClient.from_service_account_file.__func__(DataCatalogAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataCatalogClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataCatalogTransport: - """Returns the transport used by the client instance. - - Returns: - DataCatalogTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DataCatalogClient).get_transport_class, type(DataCatalogClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataCatalogTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data catalog client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DataCatalogTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
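A minimal sketch of the construction options documented above, using a service account key file; "key.json" and the endpoint override are placeholders:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import datacatalog_v1

    # "key.json" stands in for a real service account key file.
    client = datacatalog_v1.DataCatalogAsyncClient.from_service_account_file("key.json")

    # The same keyword arguments accepted by the constructor can be passed
    # through: an explicit transport name and an api_endpoint override.
    client = datacatalog_v1.DataCatalogAsyncClient.from_service_account_file(
        "key.json",
        transport="grpc_asyncio",
        client_options=ClientOptions(api_endpoint="datacatalog.googleapis.com"),
    )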
- """ - self._client = DataCatalogClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def search_catalog(self, - request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, - *, - scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchCatalogAsyncPager: - r"""Searches Data Catalog for multiple resources like entries and - tags that match a query. - - This is a [Custom Method] - (https://cloud.google.com/apis/design/custom_methods) that - doesn't return all information on a resource, only its ID and - high level fields. To get more information, you can subsequently - call specific get methods. - - Note: Data Catalog search queries don't guarantee full recall. - Results that match your query might not be returned, even in - subsequent result pages. Additionally, returned (and not - returned) results can vary if you repeat search queries. - - For more information, see [Data Catalog search syntax] - (https://cloud.google.com/data-catalog/docs/how-to/search-reference). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_search_catalog(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.SearchCatalogRequest, dict]]): - The request object. Request message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - scope (:class:`google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope`): - Required. The scope of this search request. - - The ``scope`` is invalid if ``include_org_ids``, - ``include_project_ids`` are empty AND - ``include_gcp_public_datasets`` is set to ``false``. In - this case, the request returns an error. - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query string with a minimum of 3 - characters and specific syntax. For more information, - see `Data Catalog search - syntax `__. - - An empty query string returns all data assets (in the - specified scope) that you have access to. - - A query string can be a simple ``xyz`` or qualified by - predicates: - - - ``name:x`` - - ``column:y`` - - ``description:z`` - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogAsyncPager: - Response message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.SearchCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_catalog, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchCatalogAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry_group(self, - request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group_id: Optional[str] = None, - entry_group: Optional[datacatalog.EntryGroup] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Creates an entry group. - - An entry group contains logically related entries together with - `Cloud Identity and Access - Management `__ policies. These - policies specify users who can create, edit, and view entries - within entry groups. - - Data Catalog automatically creates entry groups with names that - start with the ``@`` symbol for the following resources: - - - BigQuery entries (``@bigquery``) - - Pub/Sub topics (``@pubsub``) - - Dataproc Metastore services - (``@dataproc_metastore_{SERVICE_NAME_HASH}``) - - You can create your own entry groups for Cloud Storage fileset - entries and custom entries together with the corresponding IAM - policies. User-created entry groups can't contain the ``@`` - symbol, it is reserved for automatically created groups. - - Entry groups, like entries, can be searched. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = await client.create_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateEntryGroupRequest, dict]]): - The request object. Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. - parent (:class:`str`): - Required. The names of the project - and location that the new entry group - belongs to. - - Note: The entry group itself and its - child resources might not be stored in - the location specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (:class:`str`): - Required. The ID of the entry group to create. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), underscores (_), and must start with a letter or - underscore. The maximum size is 64 bytes when encoded in - UTF-8. - - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (:class:`google.cloud.datacatalog_v1.types.EntryGroup`): - The entry group to create. Defaults - to empty. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group_id, entry_group]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group_id is not None: - request.entry_group_id = entry_group_id - if entry_group is not None: - request.entry_group = entry_group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
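A minimal sketch of create_entry_group using the flattened parent, entry_group_id, and entry_group arguments; project, location, and IDs are placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # projects/my-project/locations/us-central1 (both values are placeholders).
    parent = client.common_location_path("my-project", "us-central1")

    entry_group = client.create_entry_group(
        parent=parent,
        entry_group_id="my_entry_group",  # letters, digits, underscores only
        entry_group=datacatalog_v1.EntryGroup(
            display_name="My entry group",
            description="Fileset and custom entries for my team.",
        ),
    )
    print(entry_group.name)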
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_group(self, - request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - read_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Gets an entry group. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_get_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.GetEntryGroupRequest, dict]]): - The request object. Request message for - [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. - name (:class:`str`): - Required. The name of the entry group - to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to return. If empty or - omitted, all fields are returned. - - This corresponds to the ``read_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, read_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if read_mask is not None: - request.read_mask = read_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entry_group(self, - request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[datacatalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Updates an entry group. - - You must enable the Data Catalog API in the project identified - by the ``entry_group.name`` parameter. For more information, see - `Data Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateEntryGroupRequest( - ) - - # Make the request - response = await client.update_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest, dict]]): - The request object. Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. - entry_group (:class:`google.cloud.datacatalog_v1.types.EntryGroup`): - Required. Updates for the entry group. The ``name`` - field must be set. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to - overwrite on an entry group. - If this parameter is absent or empty, - all modifiable fields are overwritten. - If such fields are non-required and - omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. 
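A minimal sketch combining get_entry_group with a read_mask and update_entry_group with an update_mask, as documented above; resource names and field choices are placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1
    from google.protobuf import field_mask_pb2

    client = datacatalog_v1.DataCatalogClient()
    name = client.entry_group_path("my-project", "us-central1", "my_entry_group")

    # Fetch only selected fields.
    entry_group = client.get_entry_group(
        name=name,
        read_mask=field_mask_pb2.FieldMask(paths=["name", "description"]),
    )

    # Overwrite a single field; without update_mask, all modifiable fields
    # would be overwritten and omitted ones emptied, per the docs above.
    entry_group.description = "Updated description."
    client.update_entry_group(
        entry_group=entry_group,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )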
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry_group(self, - request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an entry group. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry_group(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest, dict]]): - The request object. Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. - name (:class:`str`): - Required. The name of the entry group - to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
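A minimal sketch of delete_entry_group, treating a missing group as a no-op; the resource name is a placeholder:

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()
    name = client.entry_group_path("my-project", "us-central1", "my_entry_group")

    try:
        client.delete_entry_group(name=name)  # returns None on success
    except exceptions.NotFound:
        pass  # already gone; nothing to do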
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_entry_groups(self, - request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsAsyncPager: - r"""Lists entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ListEntryGroupsRequest, dict]]): - The request object. Request message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - parent (:class:`str`): - Required. The name of the location - that contains the entry groups to list. - Can be provided as a URL. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsAsyncPager: - Response message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
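A minimal sketch of list_entry_groups using the flattened parent argument; iterating over the pager resolves additional pages automatically, as noted above:

.. code-block:: python

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()
    parent = client.common_location_path("my-project", "us-central1")  # placeholders

    for entry_group in client.list_entry_groups(parent=parent):
        print(entry_group.name)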
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListEntryGroupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entry_groups, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryGroupsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry(self, - request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_id: Optional[str] = None, - entry: Optional[datacatalog.Entry] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Creates an entry. - - You can create entries only with 'FILESET', 'CLUSTER', - 'DATA_STREAM', or custom types. Data Catalog automatically - creates entries with other types during metadata ingestion from - integrated systems. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - An entry group can have a maximum of 100,000 entries. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateEntryRequest, dict]]): - The request object. Request message for - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. - parent (:class:`str`): - Required. The name of the entry group - this entry belongs to. 
- Note: The entry itself and its child - resources might not be stored in the - location specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (:class:`str`): - Required. The ID of the entry to create. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), and underscores (_). The maximum size is 64 bytes - when encoded in UTF-8. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (:class:`google.cloud.datacatalog_v1.types.Entry`): - Required. The entry to create. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_id, entry]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_id is not None: - request.entry_id = entry_id - if entry is not None: - request.entry = entry - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entry(self, - request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[datacatalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Updates an existing entry. - - You must enable the Data Catalog API in the project identified - by the ``entry.name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateEntryRequest, dict]]): - The request object. Request message for - [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. - entry (:class:`google.cloud.datacatalog_v1.types.Entry`): - Required. Updates for the entry. The ``name`` field must - be set. - - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. - - You can modify only the fields listed below. - - For entries with type ``DATA_STREAM``: - - - ``schema`` - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
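# A minimal usage sketch of the flattened-parameter form that the check below
# guards (either `request` or the individual fields, never both). The entry
# name, the description value, and the choice of `description` as the updated
# field are placeholders; they assume an entry type whose description is
# modifiable and default application credentials.
from google.cloud import datacatalog_v1
from google.protobuf import field_mask_pb2

async def sample_update_entry_flattened():
    client = datacatalog_v1.DataCatalogAsyncClient()

    entry = datacatalog_v1.Entry()
    entry.name = "projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry"
    entry.description = "Refreshed description"

    # Overwrite only the listed field; other modifiable fields are left untouched.
    update_mask = field_mask_pb2.FieldMask(paths=["description"])

    return await client.update_entry(entry=entry, update_mask=update_mask)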
- has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry(self, - request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing entry. - - You can delete only the entries created by the - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] - method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteEntryRequest, dict]]): - The request object. Request message for - [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. - name (:class:`str`): - Required. The name of the entry to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_entry(self, - request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_get_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.GetEntryRequest, dict]]): - The request object. Request message for - [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. - name (:class:`str`): - Required. The name of the entry to - get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
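# A minimal sketch of the flattened form of get_entry that the check below
# guards: pass `name` directly instead of a GetEntryRequest. The resource name
# is a placeholder; default application credentials are assumed.
from google.cloud import datacatalog_v1

async def sample_get_entry_flattened():
    client = datacatalog_v1.DataCatalogAsyncClient()
    entry = await client.get_entry(
        name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",
    )
    print(entry)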
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def lookup_entry(self, - request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry by its target resource name. - - The resource name comes from the source Google Cloud - Platform service. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_lookup_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.LookupEntryRequest, dict]]): - The request object. Request message for - [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - request = datacatalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
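# lookup_entry takes no flattened fields, so the target is always supplied on
# the request object. A minimal sketch using `linked_resource`; the BigQuery
# table path below is a hypothetical example of a source-system resource name.
from google.cloud import datacatalog_v1

async def sample_lookup_entry_by_linked_resource():
    client = datacatalog_v1.DataCatalogAsyncClient()
    request = datacatalog_v1.LookupEntryRequest(
        linked_resource="//bigquery.googleapis.com/projects/my-project/datasets/my_dataset/tables/my_table",
    )
    entry = await client.lookup_entry(request=request)
    print(entry.name)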
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_entries(self, - request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesAsyncPager: - r"""Lists entries. - - Note: Currently, this method can list only custom entries. To - get a list of both custom and automatically created entries, use - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_list_entries(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ListEntriesRequest, dict]]): - The request object. Request message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - parent (:class:`str`): - Required. The name of the entry group - that contains the entries to list. - Can be provided in URL format. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesAsyncPager: - Response message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
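# Because the awaited result is a ListEntriesAsyncPager, callers can iterate
# with `async for` and let additional pages resolve automatically. A minimal
# sketch using the flattened `parent` argument; the entry group name is a
# placeholder.
from google.cloud import datacatalog_v1

async def sample_iterate_entries():
    client = datacatalog_v1.DataCatalogAsyncClient()
    pager = await client.list_entries(
        parent="projects/my-project/locations/us-central1/entryGroups/my_group",
    )
    async for entry in pager:  # subsequent pages are fetched transparently
        print(entry.name)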
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntriesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def modify_entry_overview(self, - request: Optional[Union[datacatalog.ModifyEntryOverviewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryOverview: - r"""Modifies entry overview, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateOverview`` IAM permission on the - corresponding project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_modify_entry_overview(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryOverviewRequest( - name="name_value", - ) - - # Make the request - response = await client.modify_entry_overview(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest, dict]]): - The request object. Request message for - [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryOverview: - Entry overview fields for rich text - descriptions of entries. - - """ - # Create or coerce a protobuf request object. - request = datacatalog.ModifyEntryOverviewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_entry_overview, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
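# A sketch of a typical modify_entry_overview call, assuming the caller holds
# the datacatalog.entries.updateOverview permission. The `entry_overview`
# request field and its `overview` HTML string are assumptions based on the
# public EntryOverview schema, and the entry name is a placeholder.
from google.cloud import datacatalog_v1

async def sample_modify_entry_overview():
    client = datacatalog_v1.DataCatalogAsyncClient()
    request = datacatalog_v1.ModifyEntryOverviewRequest(
        name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",
        entry_overview=datacatalog_v1.EntryOverview(
            overview="<p>Business context for this entry.</p>",
        ),
    )
    overview = await client.modify_entry_overview(request=request)
    print(overview)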
- return response - - async def modify_entry_contacts(self, - request: Optional[Union[datacatalog.ModifyEntryContactsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Contacts: - r"""Modifies contacts, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateContacts`` IAM permission on the - corresponding project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_modify_entry_contacts(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryContactsRequest( - name="name_value", - ) - - # Make the request - response = await client.modify_entry_contacts(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest, dict]]): - The request object. Request message for - [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Contacts: - Contact people for the entry. - """ - # Create or coerce a protobuf request object. - request = datacatalog.ModifyEntryContactsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_entry_contacts, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_tag_template(self, - request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_id: Optional[str] = None, - tag_template: Optional[tags.TagTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Creates a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = await client.create_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagTemplateRequest, dict]]): - The request object. Request message for - [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. - parent (:class:`str`): - Required. The name of the project and the template - location - `region `__. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_id (:class:`str`): - Required. The ID of the tag template to create. - - The ID must contain only lowercase letters (a-z), - numbers (0-9), or underscores (_), and must start with a - letter or underscore. The maximum size is 64 bytes when - encoded in UTF-8. - - This corresponds to the ``tag_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template (:class:`google.cloud.datacatalog_v1.types.TagTemplate`): - Required. The tag template to create. - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_id, tag_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
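# A sketch of the flattened form applied below. The `display_name` and
# `fields` attributes follow the public TagTemplate schema and should be
# treated as assumptions here; the parent, template ID, and field values are
# placeholders.
from google.cloud import datacatalog_v1

async def sample_create_tag_template_flattened():
    client = datacatalog_v1.DataCatalogAsyncClient()

    template = datacatalog_v1.TagTemplate()
    template.display_name = "Data governance"
    template.fields["owner"] = datacatalog_v1.TagTemplateField()
    template.fields["owner"].display_name = "Owner"
    template.fields["owner"].type_.primitive_type = datacatalog_v1.FieldType.PrimitiveType.STRING

    created = await client.create_tag_template(
        parent="projects/my-project/locations/us-central1",
        tag_template_id="governance_template",
        tag_template=template,
    )
    print(created.name)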
- if parent is not None: - request.parent = parent - if tag_template_id is not None: - request.tag_template_id = tag_template_id - if tag_template is not None: - request.tag_template = tag_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_tag_template(self, - request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Gets a tag template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_get_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.GetTagTemplateRequest, dict]]): - The request object. Request message for - [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. - name (:class:`str`): - Required. The name of the tag - template to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag_template(self, - request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, - *, - tag_template: Optional[tags.TagTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Updates a tag template. - - You can't update template fields with this method. These fields - are separate resources with their own create, update, and delete - methods. - - You must enable the Data Catalog API in the project identified - by the ``tag_template.name`` parameter. For more information, - see `Data Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTagTemplateRequest( - ) - - # Make the request - response = await client.update_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest, dict]]): - The request object. Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. - tag_template (:class:`google.cloud.datacatalog_v1.types.TagTemplate`): - Required. The template to update. The ``name`` field - must be set. - - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to overwrite on a tag - template. Currently, only ``display_name`` and - ``is_publicly_readable`` can be overwritten. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. 
- - Note: Updating the ``is_publicly_readable`` field may - require up to 12 hours to take effect in search results. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag_template is not None: - request.tag_template = tag_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag_template.name", request.tag_template.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag_template(self, - request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag template and all tags that use it. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest, dict]]): - The request object. Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. - name (:class:`str`): - Required. The name of the tag - template to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (:class:`bool`): - Required. If true, deletes all tags that use this - template. - - Currently, ``true`` is the only supported value. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_tag_template_field(self, - request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_field_id: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Creates a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.create_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest, dict]]): - The request object. Request message for - [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. - parent (:class:`str`): - Required. The name of the project and the template - location - `region `__. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field_id (:class:`str`): - Required. The ID of the tag template field to create. - - Note: Adding a required field to an existing template is - *not* allowed. - - Field IDs can contain letters (both uppercase and - lowercase), numbers (0-9), underscores (_) and dashes - (-). Field IDs must be at least 1 character long and at - most 128 characters long. Field IDs must also be unique - within their template. - - This corresponds to the ``tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (:class:`google.cloud.datacatalog_v1.types.TagTemplateField`): - Required. The tag template field to - create. - - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
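# A sketch of the flattened form applied below. The parent is assumed to be a
# tag template resource name (a placeholder here), the field ID follows the
# documented character rules, and STRING is one of the primitive field types.
from google.cloud import datacatalog_v1

async def sample_create_tag_template_field_flattened():
    client = datacatalog_v1.DataCatalogAsyncClient()

    field = datacatalog_v1.TagTemplateField()
    field.display_name = "Data steward"
    field.type_.primitive_type = datacatalog_v1.FieldType.PrimitiveType.STRING

    created = await client.create_tag_template_field(
        parent="projects/my-project/locations/us-central1/tagTemplates/governance_template",
        tag_template_field_id="steward",
        tag_template_field=field,
    )
    print(created.name)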
- if parent is not None: - request.parent = parent - if tag_template_field_id is not None: - request.tag_template_field_id = tag_template_field_id - if tag_template_field is not None: - request.tag_template_field = tag_template_field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag_template_field(self, - request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Updates a field in a tag template. - - You can't update the field type with this method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.update_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest, dict]]): - The request object. Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. - name (:class:`str`): - Required. The name of the tag - template field. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (:class:`google.cloud.datacatalog_v1.types.TagTemplateField`): - Required. The template to update. - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Names of fields whose values to overwrite on - an individual field of a tag template. 
The following - fields are modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied with one exception: when updating an enum type, - the provided values are merged with the existing values. - Therefore, enum values can only be added, existing enum - values cannot be deleted or renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, tag_template_field, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if tag_template_field is not None: - request.tag_template_field = tag_template_field - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rename_tag_template_field(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - new_tag_template_field_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = await client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest, dict]]): - The request object. Request message for - [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. - name (:class:`str`): - Required. The name of the tag - template field. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_tag_template_field_id (:class:`str`): - Required. The new ID of this tag template field. For - example, ``my_new_field``. - - This corresponds to the ``new_tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_tag_template_field_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.RenameTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_tag_template_field_id is not None: - request.new_tag_template_field_id = new_tag_template_field_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rename_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
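# A sketch of the flattened form wrapped above; the field resource name is a
# placeholder and the new ID follows the documented format (for example,
# `my_new_field`).
from google.cloud import datacatalog_v1

async def sample_rename_tag_template_field_flattened():
    client = datacatalog_v1.DataCatalogAsyncClient()
    renamed = await client.rename_tag_template_field(
        name="projects/my-project/locations/us-central1/tagTemplates/governance_template/fields/steward",
        new_tag_template_field_id="data_steward",
    )
    print(renamed.name)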
- return response - - async def rename_tag_template_field_enum_value(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, - *, - name: Optional[str] = None, - new_enum_value_display_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames an enum value in a tag template. - - Within a single enum field, enum values must be unique. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = await client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest, dict]]): - The request object. Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - name (:class:`str`): - Required. The name of the enum field - value. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_enum_value_display_name (:class:`str`): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. - - This corresponds to the ``new_enum_value_display_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_enum_value_display_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_enum_value_display_name is not None: - request.new_enum_value_display_name = new_enum_value_display_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rename_tag_template_field_enum_value, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag_template_field(self, - request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a field in a tag template and all uses of this field - from the tags based on this template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template_field(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest, dict]]): - The request object. Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. - name (:class:`str`): - Required. The name of the tag - template field to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (:class:`bool`): - Required. If true, deletes this field from any tags that - use it. - - Currently, ``true`` is the only supported value. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
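Throughout this client the flattened keyword arguments and the ``request`` object are mutually exclusive: the ``has_flattened_params`` check above raises a ``ValueError`` when both are supplied. A minimal sketch of the two call styles for ``delete_tag_template_field`` follows; the ``"name_value"`` placeholder stands in for a real tag template field resource name.

.. code-block:: python

    from google.cloud import datacatalog_v1

    async def sketch_delete_field(client: datacatalog_v1.DataCatalogAsyncClient):
        # Style 1: pass the flattened fields directly.
        await client.delete_tag_template_field(name="name_value", force=True)

        # Style 2: build the request object yourself.
        request = datacatalog_v1.DeleteTagTemplateFieldRequest(
            name="name_value",
            force=True,
        )
        await client.delete_tag_template_field(request=request)

        # Supplying ``request`` together with ``name``/``force`` raises ValueError.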
- if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_tag(self, - request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag: Optional[tags.Tag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Creates a tag and assigns it to: - - - An [Entry][google.cloud.datacatalog.v1.Entry] if the method - name is - ``projects.locations.entryGroups.entries.tags.create``. - - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the - method name is - ``projects.locations.entryGroups.tags.create``. - - Note: The project identified by the ``parent`` parameter for the - [tag] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) - and the [tag template] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) - used to create the tag must be in the same organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = await client.create_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTagRequest, dict]]): - The request object. Request message for - [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. - parent (:class:`str`): - Required. The name of the resource to - attach this tag to. - Tags can be attached to entries or entry - groups. An entry can have up to 1000 - attached tags. - - Note: The tag and its child resources - might not be stored in the location - specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag (:class:`google.cloud.datacatalog_v1.types.Tag`): - Required. The tag to create. - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Tag: - Tags contain custom metadata and are attached to Data Catalog resources. Tags - conform with the specification of their tag template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag is not None: - request.tag = tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag(self, - request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, - *, - tag: Optional[tags.Tag] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Updates an existing tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = await client.update_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTagRequest, dict]]): - The request object. Request message for - [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. - tag (:class:`google.cloud.datacatalog_v1.types.Tag`): - Required. The updated tag. The "name" - field must be set. 
- - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the - name ``fields``. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Tag: - Tags contain custom metadata and are attached to Data Catalog resources. Tags - conform with the specification of their tag template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag is not None: - request.tag = tag - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag.name", request.tag.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag(self, - request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
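For ``update_tag`` above, ``update_mask`` is a standard protobuf ``FieldMask``; because ``fields`` is currently the only modifiable part of a tag, a mask of ``paths=["fields"]`` restricts the overwrite to the tag's field values. A hedged sketch, in which the tag name and the ``"field_id"`` key are placeholders for an existing tag and one of its template's field IDs:

.. code-block:: python

    from google.cloud import datacatalog_v1
    from google.protobuf import field_mask_pb2

    async def sketch_update_tag(client: datacatalog_v1.DataCatalogAsyncClient):
        # The tag to update, identified by its resource name; only the
        # values under ``fields`` will be written back.
        tag = datacatalog_v1.Tag(
            name="name_value",
            fields={"field_id": datacatalog_v1.TagField(double_value=42.0)},
        )
        mask = field_mask_pb2.FieldMask(paths=["fields"])
        updated = await client.update_tag(tag=tag, update_mask=mask)
        print(updated)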
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_tag(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTagRequest, dict]]): - The request object. Request message for - [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. - name (:class:`str`): - Required. The name of the tag to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_tags(self, - request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsAsyncPager: - r"""Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1.Entry]. The - [columns][google.cloud.datacatalog.v1.Tag.column] in the - response are lowercased. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_list_tags(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ListTagsRequest, dict]]): - The request object. Request message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - parent (:class:`str`): - Required. The name of the Data Catalog resource to list - the tags of. - - The resource can be an - [Entry][google.cloud.datacatalog.v1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] - (without ``/entries/{entries}`` at the end). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsAsyncPager: - Response message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListTagsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tags, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTagsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reconcile_tags(self, - request: Optional[Union[datacatalog.ReconcileTagsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""``ReconcileTags`` creates or updates a list of tags on the - entry. 
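The ``ListTagsAsyncPager`` returned above fetches additional pages on demand; you can iterate the tags directly or walk the raw ``ListTagsResponse`` pages. A short sketch, with ``"parent_value"`` as a placeholder entry or entry group name (use one of the two loops, not both on the same pager):

.. code-block:: python

    from google.cloud import datacatalog_v1

    async def sketch_list_tags(client: datacatalog_v1.DataCatalogAsyncClient):
        pager = await client.list_tags(parent="parent_value")

        # Option A: item by item; extra pages are requested transparently.
        async for tag in pager:
            print(tag.name)

        # Option B: page by page, e.g. to inspect page sizes or tokens.
        # async for page in pager.pages:
        #     print(len(page.tags))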
If the - [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] - parameter is set, the operation deletes tags not included in the - input tag list. - - ``ReconcileTags`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return [ReconcileTagsMetadata] - [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a - [ReconcileTagsResponse] - [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_reconcile_tags(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ReconcileTagsRequest( - parent="parent_value", - tag_template="tag_template_value", - ) - - # Make the request - operation = client.reconcile_tags(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ReconcileTagsRequest, dict]]): - The request object. Request message for - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ReconcileTagsResponse` [Long-running operation][google.longrunning.Operation] - response message returned by - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - - """ - # Create or coerce a protobuf request object. - request = datacatalog.ReconcileTagsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reconcile_tags, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datacatalog.ReconcileTagsResponse, - metadata_type=datacatalog.ReconcileTagsMetadata, - ) - - # Done; return the response. 
- return response - - async def star_entry(self, - request: Optional[Union[datacatalog.StarEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.StarEntryResponse: - r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred - by the current user. Starring information is private to each - user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_star_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.StarEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.star_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.StarEntryRequest, dict]]): - The request object. Request message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - name (:class:`str`): - Required. The name of the entry to - mark as starred. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.StarEntryResponse: - Response message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - Empty for now - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.StarEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.star_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def unstar_entry(self, - request: Optional[Union[datacatalog.UnstarEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.UnstarEntryResponse: - r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT - starred by the current user. Starring information is private to - each user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_unstar_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UnstarEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.unstar_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UnstarEntryRequest, dict]]): - The request object. Request message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - name (:class:`str`): - Required. The name of the entry to mark as **not** - starred. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.UnstarEntryResponse: - Response message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - Empty for now - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UnstarEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.unstar_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets an access control policy for a resource. Replaces any - existing policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method sets policies only within Data Catalog and - can't be used to manage policies in BigQuery, Pub/Sub, Dataproc - Metastore, and any external Google Cloud Platform resources - synced with the Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. - - May return: - - - A\ ``NOT_FOUND`` error if the resource doesn't exist or you - don't have the permission to view it. - - An empty policy if the resource exists but doesn't have a set - policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method doesn't get policies from Google Cloud - Platform resources ingested into Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
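``set_iam_policy`` and ``get_iam_policy`` take the raw ``google.iam.v1`` protos rather than proto-plus types, which is why the surrounding code constructs them via keyword expansion. A sketch that sets one explicit binding; the role, member, and ``"resource_value"`` (a tag template or entry group name) are illustrative placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2  # type: ignore

    async def sketch_set_iam_policy(client: datacatalog_v1.DataCatalogAsyncClient):
        policy = policy_pb2.Policy(
            bindings=[
                policy_pb2.Binding(
                    role="roles/datacatalog.tagTemplateUser",  # illustrative role
                    members=["user:mike@example.com"],         # illustrative member
                )
            ]
        )
        request = iam_policy_pb2.SetIamPolicyRequest(
            resource="resource_value",
            policy=policy,
        )
        response = await client.set_iam_policy(request=request)
        print(response.etag)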
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Gets your permissions on a resource. - - Returns an empty set of permissions if the resource - doesn't exist. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method gets policies only within Data Catalog - and can't be used to get policies from BigQuery, - Pub/Sub, Dataproc Metastore, and any external Google - Cloud Platform resources ingested into Data Catalog. - - No Google IAM permissions are required to call this - method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def import_entries(self, - request: Optional[Union[datacatalog.ImportEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports entries from a source, such as data previously dumped - into a Cloud Storage bucket, into Data Catalog. Import of - entries is a sync operation that reconciles the state of the - third-party system with the Data Catalog. - - ``ImportEntries`` accepts source data snapshots of a third-party - system. Snapshot should be delivered as a .wire or - base65-encoded .txt file containing a sequence of Protocol - Buffer messages of - [DumpItem][google.cloud.datacatalog.v1.DumpItem] type. - - ``ImportEntries`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return - [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata] - and an - [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse] - message. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_import_entries(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ImportEntriesRequest( - gcs_bucket_path="gcs_bucket_path_value", - parent="parent_value", - ) - - # Make the request - operation = client.import_entries(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ImportEntriesRequest, dict]]): - The request object. Request message for - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ImportEntriesResponse` Response message for [long-running operation][google.longrunning.Operation] - returned by the - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. - - """ - # Create or coerce a protobuf request object. - request = datacatalog.ImportEntriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - datacatalog.ImportEntriesResponse, - metadata_type=datacatalog.ImportEntriesMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. 
- """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "DataCatalogAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataCatalogAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py deleted file mode 100644 index c3a254ba06cc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ /dev/null @@ -1,4761 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
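The ``__aenter__``/``__aexit__`` pair defined on ``DataCatalogAsyncClient`` above lets the client manage its own transport lifetime. A small sketch, with ``"name_value"`` as a placeholder entry name:

.. code-block:: python

    from google.cloud import datacatalog_v1

    async def sketch_context_manager():
        # The gRPC channel is closed automatically when the block exits.
        async with datacatalog_v1.DataCatalogAsyncClient() as client:
            response = await client.star_entry(name="name_value")
            print(response)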
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.datacatalog_v1.services.data_catalog import pagers -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import data_source -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1.types import schema -from google.cloud.datacatalog_v1.types import search -from google.cloud.datacatalog_v1.types import table_spec -from google.cloud.datacatalog_v1.types import tags -from google.cloud.datacatalog_v1.types import timestamps -from google.cloud.datacatalog_v1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataCatalogGrpcTransport -from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport - - -class DataCatalogClientMeta(type): - """Metaclass for the DataCatalog client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] - _transport_registry["grpc"] = DataCatalogGrpcTransport - _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataCatalogTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
-        return next(iter(cls._transport_registry.values()))
-
-
-class DataCatalogClient(metaclass=DataCatalogClientMeta):
-    """Data Catalog API service allows you to discover, understand,
-    and manage your data.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            DataCatalogClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            DataCatalogClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> DataCatalogTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            DataCatalogTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str: - """Returns a fully-qualified entry string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - - @staticmethod - def parse_entry_path(path: str) -> Dict[str,str]: - """Parses a entry path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def entry_group_path(project: str,location: str,entry_group: str,) -> str: - """Returns a fully-qualified entry_group string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - - @staticmethod - def parse_entry_group_path(path: str) -> Dict[str,str]: - """Parses a entry_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def tag_path(project: str,location: str,entry_group: str,entry: str,tag: str,) -> str: - """Returns a fully-qualified tag string.""" - return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) - - @staticmethod - def parse_tag_path(path: str) -> Dict[str,str]: - """Parses a tag path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entries/(?P.+?)/tags/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def tag_template_path(project: str,location: str,tag_template: str,) -> str: - """Returns a fully-qualified tag_template string.""" - return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) - - @staticmethod - def parse_tag_template_path(path: str) -> Dict[str,str]: - """Parses a tag_template path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def tag_template_field_path(project: str,location: str,tag_template: str,field: str,) -> str: - """Returns a fully-qualified tag_template_field string.""" - return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) - - @staticmethod - def parse_tag_template_field_path(path: str) -> Dict[str,str]: - """Parses a tag_template_field path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/tagTemplates/(?P.+?)/fields/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def tag_template_field_enum_value_path(project: str,location: str,tag_template: str,tag_template_field_id: str,enum_value_display_name: str,) -> str: - """Returns a fully-qualified tag_template_field_enum_value string.""" - return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) - - 
-    @staticmethod
-    def parse_tag_template_field_enum_value_path(path: str) -> Dict[str,str]:
-        """Parses a tag_template_field_enum_value path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<tag_template_field_id>.+?)/enumValues/(?P<enum_value_display_name>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
- - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataCatalogTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data catalog client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DataCatalogTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DataCatalogTransport): - # transport is a DataCatalogTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def search_catalog(self, - request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, - *, - scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchCatalogPager: - r"""Searches Data Catalog for multiple resources like entries and - tags that match a query. - - This is a [Custom Method] - (https://cloud.google.com/apis/design/custom_methods) that - doesn't return all information on a resource, only its ID and - high level fields. To get more information, you can subsequently - call specific get methods. - - Note: Data Catalog search queries don't guarantee full recall. - Results that match your query might not be returned, even in - subsequent result pages. Additionally, returned (and not - returned) results can vary if you repeat search queries. - - For more information, see [Data Catalog search syntax] - (https://cloud.google.com/data-catalog/docs/how-to/search-reference). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_search_catalog(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.SearchCatalogRequest, dict]): - The request object. Request message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - scope (google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope): - Required. The scope of this search request. - - The ``scope`` is invalid if ``include_org_ids``, - ``include_project_ids`` are empty AND - ``include_gcp_public_datasets`` is set to ``false``. In - this case, the request returns an error. - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Optional. The query string with a minimum of 3 - characters and specific syntax. For more information, - see `Data Catalog search - syntax `__. - - An empty query string returns all data assets (in the - specified scope) that you have access to. - - A query string can be a simple ``xyz`` or qualified by - predicates: - - - ``name:x`` - - ``column:y`` - - ``description:z`` - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogPager: - Response message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.SearchCatalogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.SearchCatalogRequest): - request = datacatalog.SearchCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_catalog] - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchCatalogPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry_group(self, - request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group_id: Optional[str] = None, - entry_group: Optional[datacatalog.EntryGroup] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Creates an entry group. - - An entry group contains logically related entries together with - `Cloud Identity and Access - Management `__ policies. These - policies specify users who can create, edit, and view entries - within entry groups. - - Data Catalog automatically creates entry groups with names that - start with the ``@`` symbol for the following resources: - - - BigQuery entries (``@bigquery``) - - Pub/Sub topics (``@pubsub``) - - Dataproc Metastore services - (``@dataproc_metastore_{SERVICE_NAME_HASH}``) - - You can create your own entry groups for Cloud Storage fileset - entries and custom entries together with the corresponding IAM - policies. User-created entry groups can't contain the ``@`` - symbol, it is reserved for automatically created groups. - - Entry groups, like entries, can be searched. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = client.create_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateEntryGroupRequest, dict]): - The request object. Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. - parent (str): - Required. The names of the project - and location that the new entry group - belongs to. - - Note: The entry group itself and its - child resources might not be stored in - the location specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (str): - Required. The ID of the entry group to create. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), underscores (_), and must start with a letter or - underscore. 
The maximum size is 64 bytes when encoded in - UTF-8. - - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (google.cloud.datacatalog_v1.types.EntryGroup): - The entry group to create. Defaults - to empty. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group_id, entry_group]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateEntryGroupRequest): - request = datacatalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group_id is not None: - request.entry_group_id = entry_group_id - if entry_group is not None: - request.entry_group = entry_group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_group(self, - request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - read_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Gets an entry group. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_get_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.GetEntryGroupRequest, dict]): - The request object. Request message for - [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. - name (str): - Required. The name of the entry group - to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return. If empty or - omitted, all fields are returned. - - This corresponds to the ``read_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, read_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetEntryGroupRequest): - request = datacatalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if read_mask is not None: - request.read_mask = read_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_entry_group(self, - request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[datacatalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Updates an entry group. - - You must enable the Data Catalog API in the project identified - by the ``entry_group.name`` parameter. For more information, see - `Data Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateEntryGroupRequest( - ) - - # Make the request - response = client.update_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest, dict]): - The request object. Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. - entry_group (google.cloud.datacatalog_v1.types.EntryGroup): - Required. Updates for the entry group. The ``name`` - field must be set. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to - overwrite on an entry group. - If this parameter is absent or empty, - all modifiable fields are overwritten. - If such fields are non-required and - omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryGroup: - Entry group metadata. - - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1.Entry] resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, datacatalog.UpdateEntryGroupRequest): - request = datacatalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entry_group(self, - request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an entry group. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - client.delete_entry_group(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest, dict]): - The request object. Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. - name (str): - Required. The name of the entry group - to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, datacatalog.DeleteEntryGroupRequest): - request = datacatalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_entry_groups(self, - request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsPager: - r"""Lists entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ListEntryGroupsRequest, dict]): - The request object. Request message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - parent (str): - Required. The name of the location - that contains the entry groups to list. - Can be provided as a URL. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsPager: - Response message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListEntryGroupsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListEntryGroupsRequest): - request = datacatalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryGroupsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry(self, - request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_id: Optional[str] = None, - entry: Optional[datacatalog.Entry] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Creates an entry. - - You can create entries only with 'FILESET', 'CLUSTER', - 'DATA_STREAM', or custom types. Data Catalog automatically - creates entries with other types during metadata ingestion from - integrated systems. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - An entry group can have a maximum of 100,000 entries. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateEntryRequest, dict]): - The request object. Request message for - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. - parent (str): - Required. The name of the entry group - this entry belongs to. - Note: The entry itself and its child - resources might not be stored in the - location specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- entry_id (str): - Required. The ID of the entry to create. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), and underscores (_). The maximum size is 64 bytes - when encoded in UTF-8. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (google.cloud.datacatalog_v1.types.Entry): - Required. The entry to create. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_id, entry]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateEntryRequest): - request = datacatalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_id is not None: - request.entry_id = entry_id - if entry is not None: - request.entry = entry - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_entry(self, - request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[datacatalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Updates an existing entry. - - You must enable the Data Catalog API in the project identified - by the ``entry.name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateEntryRequest, dict]): - The request object. Request message for - [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. - entry (google.cloud.datacatalog_v1.types.Entry): - Required. Updates for the entry. The ``name`` field must - be set. - - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. - - You can modify only the fields listed below. - - For entries with type ``DATA_STREAM``: - - - ``schema`` - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateEntryRequest): - request = datacatalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entry(self, - request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing entry. - - You can delete only the entries created by the - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] - method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - client.delete_entry(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteEntryRequest, dict]): - The request object. Request message for - [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. - name (str): - Required. The name of the entry to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteEntryRequest): - request = datacatalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_entry(self, - request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_get_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.GetEntryRequest, dict]): - The request object. Request message for - [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. - name (str): - Required. The name of the entry to - get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetEntryRequest): - request = datacatalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def lookup_entry(self, - request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry by its target resource name. - - The resource name comes from the source Google Cloud - Platform service. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_lookup_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.LookupEntryRequest, dict]): - The request object. Request message for - [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Entry: - Entry metadata. - A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or - a Pub/Sub topic) or outside of it. You can use the - linked_resource field in the entry resource to refer - to the original resource ID of the source system. - - An entry resource contains resource details, for - example, its schema. Additionally, you can attach - flexible metadata to an entry in the form of a - [Tag][google.cloud.datacatalog.v1.Tag]. 
- - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.LookupEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.LookupEntryRequest): - request = datacatalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lookup_entry] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entries(self, - request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesPager: - r"""Lists entries. - - Note: Currently, this method can list only custom entries. To - get a list of both custom and automatically created entries, use - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_list_entries(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ListEntriesRequest, dict]): - The request object. Request message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - parent (str): - Required. The name of the entry group - that contains the entries to list. - Can be provided in URL format. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesPager: - Response message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListEntriesRequest): - request = datacatalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def modify_entry_overview(self, - request: Optional[Union[datacatalog.ModifyEntryOverviewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryOverview: - r"""Modifies entry overview, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateOverview`` IAM permission on the - corresponding project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_modify_entry_overview(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryOverviewRequest( - name="name_value", - ) - - # Make the request - response = client.modify_entry_overview(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest, dict]): - The request object. Request message for - [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.EntryOverview: - Entry overview fields for rich text - descriptions of entries. - - """ - # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ModifyEntryOverviewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ModifyEntryOverviewRequest): - request = datacatalog.ModifyEntryOverviewRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.modify_entry_overview] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def modify_entry_contacts(self, - request: Optional[Union[datacatalog.ModifyEntryContactsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Contacts: - r"""Modifies contacts, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateContacts`` IAM permission on the - corresponding project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_modify_entry_contacts(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryContactsRequest( - name="name_value", - ) - - # Make the request - response = client.modify_entry_contacts(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest, dict]): - The request object. Request message for - [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Contacts: - Contact people for the entry. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ModifyEntryContactsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ModifyEntryContactsRequest): - request = datacatalog.ModifyEntryContactsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.modify_entry_contacts] - - # Certain fields should be provided within the metadata header; - # add these here. 
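-        # As a rough sketch of what the routing header below produces (exact encoding
-        # may differ): for a request with
-        # name="projects/p/locations/l/entryGroups/g/entries/e", an extra gRPC metadata
-        # entry similar to
-        #     ("x-goog-request-params", "name=projects%2Fp%2Flocations%2Fl%2FentryGroups%2Fg%2Fentries%2Fe")
-        # is appended so the backend can route the call to the right resource.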
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_tag_template(self, - request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_id: Optional[str] = None, - tag_template: Optional[tags.TagTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Creates a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = client.create_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateTagTemplateRequest, dict]): - The request object. Request message for - [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. - parent (str): - Required. The name of the project and the template - location - `region `__. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_id (str): - Required. The ID of the tag template to create. - - The ID must contain only lowercase letters (a-z), - numbers (0-9), or underscores (_), and must start with a - letter or underscore. The maximum size is 64 bytes when - encoded in UTF-8. - - This corresponds to the ``tag_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template (google.cloud.datacatalog_v1.types.TagTemplate): - Required. The tag template to create. - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. 
For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_id, tag_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateTagTemplateRequest): - request = datacatalog.CreateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag_template_id is not None: - request.tag_template_id = tag_template_id - if tag_template is not None: - request.tag_template = tag_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_tag_template(self, - request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Gets a tag template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_get_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.GetTagTemplateRequest, dict]): - The request object. Request message for - [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. - name (str): - Required. The name of the tag - template to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetTagTemplateRequest): - request = datacatalog.GetTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_tag_template(self, - request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, - *, - tag_template: Optional[tags.TagTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Updates a tag template. - - You can't update template fields with this method. These fields - are separate resources with their own create, update, and delete - methods. - - You must enable the Data Catalog API in the project identified - by the ``tag_template.name`` parameter. For more information, - see `Data Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTagTemplateRequest( - ) - - # Make the request - response = client.update_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest, dict]): - The request object. Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. - tag_template (google.cloud.datacatalog_v1.types.TagTemplate): - Required. The template to update. The ``name`` field - must be set. - - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag - template. Currently, only ``display_name`` and - ``is_publicly_readable`` can be overwritten. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. - - Note: Updating the ``is_publicly_readable`` field may - require up to 12 hours to take effect in search results. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplate: - A tag template defines a tag that can have one or more - typed fields. - - The template is used to create tags that are attached to Google Cloud - resources. [Tag template roles] - - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. For example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagTemplateRequest): - request = datacatalog.UpdateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
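-        # A hedged sketch of a flattened call that reaches this block (resource names
-        # and values are hypothetical; per the docstring above, only `display_name`
-        # and `is_publicly_readable` can be overwritten):
-        #
-        #     from google.protobuf import field_mask_pb2
-        #
-        #     template = datacatalog_v1.TagTemplate()
-        #     template.name = "projects/my-project/locations/us-central1/tagTemplates/my_template"
-        #     template.display_name = "Curated display name"
-        #     client.update_tag_template(
-        #         tag_template=template,
-        #         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
-        #     )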
- if tag_template is not None: - request.tag_template = tag_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag_template.name", request.tag_template.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag_template(self, - request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag template and all tags that use it. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest, dict]): - The request object. Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. - name (str): - Required. The name of the tag - template to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (bool): - Required. If true, deletes all tags that use this - template. - - Currently, ``true`` is the only supported value. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagTemplateRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagTemplateRequest): - request = datacatalog.DeleteTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_tag_template_field(self, - request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_field_id: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Creates a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.create_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest, dict]): - The request object. Request message for - [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. - parent (str): - Required. The name of the project and the template - location - `region `__. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field_id (str): - Required. The ID of the tag template field to create. - - Note: Adding a required field to an existing template is - *not* allowed. - - Field IDs can contain letters (both uppercase and - lowercase), numbers (0-9), underscores (_) and dashes - (-). Field IDs must be at least 1 character long and at - most 128 characters long. Field IDs must also be unique - within their template. 
- - This corresponds to the ``tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): - Required. The tag template field to - create. - - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest): - request = datacatalog.CreateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag_template_field_id is not None: - request.tag_template_field_id = tag_template_field_id - if tag_template_field is not None: - request.tag_template_field = tag_template_field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_tag_template_field(self, - request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Updates a field in a tag template. - - You can't update the field type with this method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.update_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest, dict]): - The request object. Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. - name (str): - Required. The name of the tag - template field. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): - Required. The template to update. - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Names of fields whose values to overwrite on - an individual field of a tag template. The following - fields are modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied with one exception: when updating an enum type, - the provided values are merged with the existing values. - Therefore, enum values can only be added, existing enum - values cannot be deleted or renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, tag_template_field, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest): - request = datacatalog.UpdateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
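-        # A hedged sketch of a flattened call that reaches this block (names are
-        # hypothetical). Note from the docstring above: when `type.enum_type` is in
-        # the mask, the supplied enum values are merged with the existing ones, so
-        # enum values can be added but not removed or renamed.
-        #
-        #     from google.protobuf import field_mask_pb2
-        #
-        #     field = datacatalog_v1.TagTemplateField(display_name="Readable field name")
-        #     client.update_tag_template_field(
-        #         name="projects/my-project/locations/us-central1/tagTemplates/my_template/fields/my_field",
-        #         tag_template_field=field,
-        #         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
-        #     )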
- if name is not None: - request.name = name - if tag_template_field is not None: - request.tag_template_field = tag_template_field - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_tag_template_field(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - new_tag_template_field_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest, dict]): - The request object. Request message for - [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. - name (str): - Required. The name of the tag - template field. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_tag_template_field_id (str): - Required. The new ID of this tag template field. For - example, ``my_new_field``. - - This corresponds to the ``new_tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_tag_template_field_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.RenameTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): - request = datacatalog.RenameTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_tag_template_field_id is not None: - request.new_tag_template_field_id = new_tag_template_field_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_tag_template_field_enum_value(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, - *, - name: Optional[str] = None, - new_enum_value_display_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames an enum value in a tag template. - - Within a single enum field, enum values must be unique. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest, dict]): - The request object. Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - name (str): - Required. The name of the enum field - value. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_enum_value_display_name (str): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. 
- - This corresponds to the ``new_enum_value_display_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_enum_value_display_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): - request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_enum_value_display_name is not None: - request.new_enum_value_display_name = new_enum_value_display_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field_enum_value] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag_template_field(self, - request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a field in a tag template and all uses of this field - from the tags based on this template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template_field(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest, dict]): - The request object. Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. - name (str): - Required. The name of the tag - template field to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (bool): - Required. If true, deletes this field from any tags that - use it. - - Currently, ``true`` is the only supported value. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): - request = datacatalog.DeleteTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_tag(self, - request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag: Optional[tags.Tag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Creates a tag and assigns it to: - - - An [Entry][google.cloud.datacatalog.v1.Entry] if the method - name is - ``projects.locations.entryGroups.entries.tags.create``. 
- - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the - method name is - ``projects.locations.entryGroups.tags.create``. - - Note: The project identified by the ``parent`` parameter for the - [tag] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) - and the [tag template] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) - used to create the tag must be in the same organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = client.create_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateTagRequest, dict]): - The request object. Request message for - [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. - parent (str): - Required. The name of the resource to - attach this tag to. - Tags can be attached to entries or entry - groups. An entry can have up to 1000 - attached tags. - - Note: The tag and its child resources - might not be stored in the location - specified in its name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag (google.cloud.datacatalog_v1.types.Tag): - Required. The tag to create. - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Tag: - Tags contain custom metadata and are attached to Data Catalog resources. Tags - conform with the specification of their tag template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
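As a complement to the generated request-object sample above, a minimal sketch of the flattened-argument call style for ``create_tag`` (all resource names are hypothetical); the client raises ``ValueError`` if ``request`` and flattened fields are mixed:

.. code-block:: python

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    tag = datacatalog_v1.Tag()
    # Hypothetical tag template; in practice the template must exist and the
    # tag's fields would be populated to match it.
    tag.template = "projects/my-project/locations/us-central1/tagTemplates/my_template"

    # Flattened arguments populate CreateTagRequest.parent and CreateTagRequest.tag
    # internally; do not pass `request=` together with these keywords.
    response = client.create_tag(
        parent=(
            "projects/my-project/locations/us-central1/entryGroups/my_group"
            "/entries/my_entry"
        ),
        tag=tag,
    )
    print(response.name)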
- if not isinstance(request, datacatalog.CreateTagRequest): - request = datacatalog.CreateTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag is not None: - request.tag = tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_tag(self, - request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, - *, - tag: Optional[tags.Tag] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Updates an existing tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = client.update_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateTagRequest, dict]): - The request object. Request message for - [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. - tag (google.cloud.datacatalog_v1.types.Tag): - Required. The updated tag. The "name" - field must be set. - - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the - name ``fields``. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Tag: - Tags contain custom metadata and are attached to Data Catalog resources. Tags - conform with the specification of their tag template. 
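A hedged sketch of pairing ``update_tag`` with an explicit ``FieldMask`` restricted to the modifiable ``fields`` field, per the ``update_mask`` description above (the tag name and the ``owner`` field ID are hypothetical):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    tag = datacatalog_v1.Tag(
        # Hypothetical existing tag; "name" must be set for updates, and the
        # "owner" field ID must be defined in the tag's template.
        name=(
            "projects/my-project/locations/us-central1/entryGroups/my_group"
            "/entries/my_entry/tags/my_tag"
        ),
        fields={"owner": datacatalog_v1.TagField(string_value="data-platform-team")},
    )

    # Only the paths listed in the mask are overwritten on the stored tag.
    response = client.update_tag(
        tag=tag,
        update_mask=field_mask_pb2.FieldMask(paths=["fields"]),
    )
    print(response.name)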
- - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagRequest): - request = datacatalog.UpdateTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag is not None: - request.tag = tag - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag.name", request.tag.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag(self, - request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - client.delete_tag(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteTagRequest, dict]): - The request object. Request message for - [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. - name (str): - Required. The name of the tag to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagRequest): - request = datacatalog.DeleteTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_tags(self, - request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsPager: - r"""Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1.Entry]. The - [columns][google.cloud.datacatalog.v1.Tag.column] in the - response are lowercased. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_list_tags(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ListTagsRequest, dict]): - The request object. Request message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - parent (str): - Required. The name of the Data Catalog resource to list - the tags of. - - The resource can be an - [Entry][google.cloud.datacatalog.v1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] - (without ``/entries/{entries}`` at the end). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsPager: - Response message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. 
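Since ``ListTagsPager`` resolves additional pages as it is iterated, a short sketch of consuming it item-by-item and page-by-page (the parent entry name is hypothetical):

.. code-block:: python

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Hypothetical entry to list tags for.
    parent = (
        "projects/my-project/locations/us-central1/entryGroups/my_group"
        "/entries/my_entry"
    )

    # Iterating the pager yields Tag messages and follows next_page_token lazily.
    for tag in client.list_tags(parent=parent):
        print(tag.name, tag.template)

    # The underlying ListTagsResponse pages are also reachable via `.pages`.
    for page in client.list_tags(parent=parent, timeout=30.0).pages:
        print(len(page.tags))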
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListTagsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListTagsRequest): - request = datacatalog.ListTagsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tags] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTagsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def reconcile_tags(self, - request: Optional[Union[datacatalog.ReconcileTagsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""``ReconcileTags`` creates or updates a list of tags on the - entry. If the - [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] - parameter is set, the operation deletes tags not included in the - input tag list. - - ``ReconcileTags`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return [ReconcileTagsMetadata] - [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a - [ReconcileTagsResponse] - [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_reconcile_tags(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ReconcileTagsRequest( - parent="parent_value", - tag_template="tag_template_value", - ) - - # Make the request - operation = client.reconcile_tags(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ReconcileTagsRequest, dict]): - The request object. Request message for - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ReconcileTagsResponse` [Long-running operation][google.longrunning.Operation] - response message returned by - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ReconcileTagsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ReconcileTagsRequest): - request = datacatalog.ReconcileTagsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reconcile_tags] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datacatalog.ReconcileTagsResponse, - metadata_type=datacatalog.ReconcileTagsMetadata, - ) - - # Done; return the response. - return response - - def star_entry(self, - request: Optional[Union[datacatalog.StarEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.StarEntryResponse: - r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred - by the current user. Starring information is private to each - user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_star_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.StarEntryRequest( - name="name_value", - ) - - # Make the request - response = client.star_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.StarEntryRequest, dict]): - The request object. Request message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - name (str): - Required. The name of the entry to - mark as starred. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.StarEntryResponse: - Response message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - Empty for now - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.StarEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.StarEntryRequest): - request = datacatalog.StarEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.star_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def unstar_entry(self, - request: Optional[Union[datacatalog.UnstarEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.UnstarEntryResponse: - r"""Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT - starred by the current user. Starring information is private to - each user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_unstar_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UnstarEntryRequest( - name="name_value", - ) - - # Make the request - response = client.unstar_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UnstarEntryRequest, dict]): - The request object. Request message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - name (str): - Required. The name of the entry to mark as **not** - starred. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.UnstarEntryResponse: - Response message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - Empty for now - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UnstarEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UnstarEntryRequest): - request = datacatalog.UnstarEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.unstar_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets an access control policy for a resource. Replaces any - existing policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method sets policies only within Data Catalog and - can't be used to manage policies in BigQuery, Pub/Sub, Dataproc - Metastore, and any external Google Cloud Platform resources - synced with the Data Catalog. 
- - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
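Before the JSON and YAML policy renderings below, a hedged read-modify-write sketch for granting a role on a tag template (the resource name, role, and member are hypothetical and must exist in your project):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2  # type: ignore

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Hypothetical tag template to grant access on.
    resource = "projects/my-project/locations/us-central1/tagTemplates/my_template"

    # Read the current policy first so existing bindings (and the etag) are preserved.
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )

    binding = policy.bindings.add()
    binding.role = "roles/datacatalog.tagTemplateUser"
    binding.members.append("user:analyst@example.com")

    response = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )
    print(response.etag)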
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. - - May return: - - - A\ ``NOT_FOUND`` error if the resource doesn't exist or you - don't have the permission to view it. - - An empty policy if the resource exists but doesn't have a set - policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method doesn't get policies from Google Cloud - Platform resources ingested into Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Gets your permissions on a resource. - - Returns an empty set of permissions if the resource - doesn't exist. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method gets policies only within Data Catalog - and can't be used to get policies from BigQuery, - Pub/Sub, Dataproc Metastore, and any external Google - Cloud Platform resources ingested into Data Catalog. - - No Google IAM permissions are required to call this - method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def import_entries(self, - request: Optional[Union[datacatalog.ImportEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Imports entries from a source, such as data previously dumped - into a Cloud Storage bucket, into Data Catalog. Import of - entries is a sync operation that reconciles the state of the - third-party system with the Data Catalog. - - ``ImportEntries`` accepts source data snapshots of a third-party - system. Snapshot should be delivered as a .wire or - base65-encoded .txt file containing a sequence of Protocol - Buffer messages of - [DumpItem][google.cloud.datacatalog.v1.DumpItem] type. - - ``ImportEntries`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return - [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata] - and an - [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse] - message. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_import_entries(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ImportEntriesRequest( - gcs_bucket_path="gcs_bucket_path_value", - parent="parent_value", - ) - - # Make the request - operation = client.import_entries(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ImportEntriesRequest, dict]): - The request object. Request message for - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.datacatalog_v1.types.ImportEntriesResponse` Response message for [long-running operation][google.longrunning.Operation] - returned by the - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ImportEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ImportEntriesRequest): - request = datacatalog.ImportEntriesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - datacatalog.ImportEntriesResponse, - metadata_type=datacatalog.ImportEntriesMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataCatalogClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. 
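Tying together the context-manager support and the ``import_entries`` long-running operation shown above, a hedged sketch (the bucket path and resource names are hypothetical):

.. code-block:: python

    from google.cloud import datacatalog_v1

    # Leaving the `with` block closes the underlying transport.
    with datacatalog_v1.DataCatalogClient() as client:
        operation = client.import_entries(
            request=datacatalog_v1.ImportEntriesRequest(
                # Hypothetical dump location and target entry group.
                gcs_bucket_path="gs://my-bucket/datacatalog-dump",
                parent="projects/my-project/locations/us-central1/entryGroups/my_group",
            )
        )

        # result() blocks until the ImportEntriesResponse is available;
        # operation.metadata carries ImportEntriesMetadata while the job runs.
        response = operation.result(timeout=600)
        print(response.upserted_entries_count)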
- - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. 
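For the generic operations helpers (``get_operation`` above, ``cancel_operation`` below), a minimal sketch of polling and best-effort cancellation; the operation name is hypothetical:

.. code-block:: python

    from google.longrunning import operations_pb2  # type: ignore

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Hypothetical operation name from an earlier ReconcileTags or ImportEntries call.
    op_name = "projects/my-project/locations/us-central1/operations/abcd1234"

    op = client.get_operation(operations_pb2.GetOperationRequest(name=op_name))
    if not op.done:
        # Cancellation is best effort; the server may still finish the operation.
        client.cancel_operation(operations_pb2.CancelOperationRequest(name=op_name))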
- - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataCatalogClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py deleted file mode 100644 index a9091c273cd1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/pagers.py +++ /dev/null @@ -1,504 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import search -from google.cloud.datacatalog_v1.types import tags - - -class SearchCatalogPager: - """A pager for iterating through ``search_catalog`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchCatalog`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.SearchCatalogResponse], - request: datacatalog.SearchCatalogRequest, - response: datacatalog.SearchCatalogResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.SearchCatalogRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.SearchCatalogResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = datacatalog.SearchCatalogRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.SearchCatalogResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[search.SearchCatalogResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchCatalogAsyncPager: - """A pager for iterating through ``search_catalog`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchCatalog`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.SearchCatalogResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.SearchCatalogResponse]], - request: datacatalog.SearchCatalogRequest, - response: datacatalog.SearchCatalogResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.SearchCatalogRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.SearchCatalogResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.SearchCatalogRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.SearchCatalogResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[search.SearchCatalogResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListEntryGroupsResponse], - request: datacatalog.ListEntryGroupsRequest, - response: datacatalog.ListEntryGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListEntryGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntryGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datacatalog.EntryGroup]: - for page in self.pages: - yield from page.entry_groups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsAsyncPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListEntryGroupsResponse]], - request: datacatalog.ListEntryGroupsRequest, - response: datacatalog.ListEntryGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListEntryGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = datacatalog.ListEntryGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datacatalog.EntryGroup]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_groups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListEntriesResponse], - request: datacatalog.ListEntriesRequest, - response: datacatalog.ListEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datacatalog.Entry]: - for page in self.pages: - yield from page.entries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesAsyncPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.datacatalog_v1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListEntriesResponse]], - request: datacatalog.ListEntriesRequest, - response: datacatalog.ListEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datacatalog.Entry]: - async def async_generator(): - async for page in self.pages: - for response in page.entries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTagsPager: - """A pager for iterating through ``list_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tags`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTags`` requests and continue to iterate - through the ``tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListTagsResponse], - request: datacatalog.ListTagsRequest, - response: datacatalog.ListTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
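The async pagers follow the same shape but are consumed with ``async for``. A sketch, assuming a placeholder entry resource name:

import asyncio

from google.cloud import datacatalog_v1


async def list_all_tags(entry_name: str) -> None:
    client = datacatalog_v1.DataCatalogAsyncClient()
    # list_tags returns a ListTagsAsyncPager; ``async for`` walks every page,
    # awaiting additional ListTags requests as needed.
    pager = await client.list_tags(parent=entry_name)
    async for tag in pager:
        print(tag.name)


asyncio.run(list_all_tags(
    "projects/PROJECT_ID/locations/us-central1/entryGroups/EG_ID/entries/ENTRY_ID"
))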
- """ - self._method = method - self._request = datacatalog.ListTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tags.Tag]: - for page in self.pages: - yield from page.tags - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTagsAsyncPager: - """A pager for iterating through ``list_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tags`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTags`` requests and continue to iterate - through the ``tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListTagsResponse]], - request: datacatalog.ListTagsRequest, - response: datacatalog.ListTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tags.Tag]: - async def async_generator(): - async for page in self.pages: - for response in page.tags: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py deleted file mode 100644 index 8b4fbbf168be..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataCatalogTransport -from .grpc import DataCatalogGrpcTransport -from .grpc_asyncio import DataCatalogGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] -_transport_registry['grpc'] = DataCatalogGrpcTransport -_transport_registry['grpc_asyncio'] = DataCatalogGrpcAsyncIOTransport - -__all__ = ( - 'DataCatalogTransport', - 'DataCatalogGrpcTransport', - 'DataCatalogGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py deleted file mode 100644 index b78071be0649..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ /dev/null @@ -1,657 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
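The registry above maps the labels ``grpc`` and ``grpc_asyncio`` to transport classes. A sketch of resolving a transport by label through the generated client surface; ``get_transport_class`` lives on the client metaclass, which is outside this hunk, so treat it as an assumption:

from google.cloud import datacatalog_v1

grpc_cls = datacatalog_v1.DataCatalogClient.get_transport_class("grpc")
asyncio_cls = datacatalog_v1.DataCatalogClient.get_transport_class("grpc_asyncio")
print(grpc_cls.__name__, asyncio_cls.__name__)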
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataCatalogTransport(abc.ABC): - """Abstract transport class for DataCatalog.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.search_catalog: gapic_v1.method.wrap_method( - self.search_catalog, - default_timeout=None, - client_info=client_info, - ), - self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.get_entry_group: gapic_v1.method.wrap_method( - self.get_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry_group: gapic_v1.method.wrap_method( - self.delete_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.list_entry_groups: gapic_v1.method.wrap_method( - self.list_entry_groups, - default_timeout=None, - client_info=client_info, - ), - self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, - default_timeout=None, - client_info=client_info, - ), - self.update_entry: gapic_v1.method.wrap_method( - self.update_entry, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry: gapic_v1.method.wrap_method( - self.delete_entry, - default_timeout=None, - client_info=client_info, - ), - self.get_entry: gapic_v1.method.wrap_method( - self.get_entry, - default_timeout=None, - client_info=client_info, - ), - self.lookup_entry: gapic_v1.method.wrap_method( - self.lookup_entry, - default_timeout=None, - client_info=client_info, - ), - self.list_entries: gapic_v1.method.wrap_method( - self.list_entries, - default_timeout=None, - client_info=client_info, - ), - self.modify_entry_overview: gapic_v1.method.wrap_method( - self.modify_entry_overview, - default_timeout=None, - client_info=client_info, - ), - self.modify_entry_contacts: gapic_v1.method.wrap_method( - self.modify_entry_contacts, - default_timeout=None, - client_info=client_info, - ), - self.create_tag_template: gapic_v1.method.wrap_method( - self.create_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.get_tag_template: gapic_v1.method.wrap_method( - self.get_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.update_tag_template: gapic_v1.method.wrap_method( - self.update_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.delete_tag_template: gapic_v1.method.wrap_method( - self.delete_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.create_tag_template_field: gapic_v1.method.wrap_method( - self.create_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.update_tag_template_field: gapic_v1.method.wrap_method( - self.update_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.rename_tag_template_field: gapic_v1.method.wrap_method( - 
self.rename_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( - self.rename_tag_template_field_enum_value, - default_timeout=None, - client_info=client_info, - ), - self.delete_tag_template_field: gapic_v1.method.wrap_method( - self.delete_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.create_tag: gapic_v1.method.wrap_method( - self.create_tag, - default_timeout=None, - client_info=client_info, - ), - self.update_tag: gapic_v1.method.wrap_method( - self.update_tag, - default_timeout=None, - client_info=client_info, - ), - self.delete_tag: gapic_v1.method.wrap_method( - self.delete_tag, - default_timeout=None, - client_info=client_info, - ), - self.list_tags: gapic_v1.method.wrap_method( - self.list_tags, - default_timeout=None, - client_info=client_info, - ), - self.reconcile_tags: gapic_v1.method.wrap_method( - self.reconcile_tags, - default_timeout=None, - client_info=client_info, - ), - self.star_entry: gapic_v1.method.wrap_method( - self.star_entry, - default_timeout=None, - client_info=client_info, - ), - self.unstar_entry: gapic_v1.method.wrap_method( - self.unstar_entry, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.import_entries: gapic_v1.method.wrap_method( - self.import_entries, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
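Because every RPC above is wrapped with ``gapic_v1.method.wrap_method``, callers can override retry and timeout behavior per call. A sketch with placeholder values and a placeholder resource name:

from google.api_core import retry as retries
from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()

# Per-call retry and timeout settings flow through the wrapped method.
entry = client.get_entry(
    name="projects/PROJECT_ID/locations/us-central1/entryGroups/EG_ID/entries/ENTRY_ID",
    retry=retries.Retry(initial=0.1, maximum=10.0, multiplier=1.3, timeout=60.0),
    timeout=30.0,
)
print(entry.display_name)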
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - Union[ - datacatalog.SearchCatalogResponse, - Awaitable[datacatalog.SearchCatalogResponse] - ]]: - raise NotImplementedError() - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - Union[ - datacatalog.ListEntryGroupsResponse, - Awaitable[datacatalog.ListEntryGroupsResponse] - ]]: - raise NotImplementedError() - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - Union[ - datacatalog.ListEntriesResponse, - Awaitable[datacatalog.ListEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def modify_entry_overview(self) -> Callable[ - [datacatalog.ModifyEntryOverviewRequest], - Union[ - datacatalog.EntryOverview, - Awaitable[datacatalog.EntryOverview] - ]]: - raise NotImplementedError() - - @property - def modify_entry_contacts(self) -> Callable[ - [datacatalog.ModifyEntryContactsRequest], - Union[ - datacatalog.Contacts, - Awaitable[datacatalog.Contacts] - ]]: - raise NotImplementedError() - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise NotImplementedError() - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise NotImplementedError() - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise 
NotImplementedError() - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - Union[ - tags.Tag, - Awaitable[tags.Tag] - ]]: - raise NotImplementedError() - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - Union[ - tags.Tag, - Awaitable[tags.Tag] - ]]: - raise NotImplementedError() - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - Union[ - datacatalog.ListTagsResponse, - Awaitable[datacatalog.ListTagsResponse] - ]]: - raise NotImplementedError() - - @property - def reconcile_tags(self) -> Callable[ - [datacatalog.ReconcileTagsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def star_entry(self) -> Callable[ - [datacatalog.StarEntryRequest], - Union[ - datacatalog.StarEntryResponse, - Awaitable[datacatalog.StarEntryResponse] - ]]: - raise NotImplementedError() - - @property - def unstar_entry(self) -> Callable[ - [datacatalog.UnstarEntryRequest], - Union[ - datacatalog.UnstarEntryResponse, - Awaitable[datacatalog.UnstarEntryResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def import_entries(self) -> Callable[ - [datacatalog.ImportEntriesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - 
[operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataCatalogTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py deleted file mode 100644 index 95e3e749eeac..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ /dev/null @@ -1,1469 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO - - -class DataCatalogGrpcTransport(DataCatalogTransport): - """gRPC backend transport for DataCatalog. - - Data Catalog API service allows you to discover, understand, - and manage your data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
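The base transport above accepts either explicit credentials or a credentials file (mutually exclusive) and otherwise falls back to application default credentials. A sketch of supplying explicit service-account credentials, with a placeholder key path:

from google.oauth2 import service_account

from google.cloud import datacatalog_v1

credentials = service_account.Credentials.from_service_account_file(
    "/path/to/key.json"  # placeholder path
)
client = datacatalog_v1.DataCatalogClient(credentials=credentials, transport="grpc")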
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - datacatalog.SearchCatalogResponse]: - r"""Return a callable for the search catalog method over gRPC. - - Searches Data Catalog for multiple resources like entries and - tags that match a query. - - This is a [Custom Method] - (https://cloud.google.com/apis/design/custom_methods) that - doesn't return all information on a resource, only its ID and - high level fields. To get more information, you can subsequently - call specific get methods. - - Note: Data Catalog search queries don't guarantee full recall. - Results that match your query might not be returned, even in - subsequent result pages. Additionally, returned (and not - returned) results can vary if you repeat search queries. - - For more information, see [Data Catalog search syntax] - (https://cloud.google.com/data-catalog/docs/how-to/search-reference). - - Returns: - Callable[[~.SearchCatalogRequest], - ~.SearchCatalogResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_catalog' not in self._stubs: - self._stubs['search_catalog'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/SearchCatalog', - request_serializer=datacatalog.SearchCatalogRequest.serialize, - response_deserializer=datacatalog.SearchCatalogResponse.deserialize, - ) - return self._stubs['search_catalog'] - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an entry group. - - An entry group contains logically related entries together with - `Cloud Identity and Access - Management `__ policies. These - policies specify users who can create, edit, and view entries - within entry groups. - - Data Catalog automatically creates entry groups with names that - start with the ``@`` symbol for the following resources: - - - BigQuery entries (``@bigquery``) - - Pub/Sub topics (``@pubsub``) - - Dataproc Metastore services - (``@dataproc_metastore_{SERVICE_NAME_HASH}``) - - You can create your own entry groups for Cloud Storage fileset - entries and custom entries together with the corresponding IAM - policies. 
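Each stub property in this transport follows the same lazy-caching idiom seen above for ``search_catalog``: build the unary-unary callable once per RPC name, then reuse it. A distilled sketch with a hypothetical helper name:

from typing import Any, Callable, Dict

import grpc


def cached_stub(
    stubs: Dict[str, Callable[..., Any]],
    channel: grpc.Channel,
    name: str,
    method_path: str,
    request_serializer: Callable[[Any], bytes],
    response_deserializer: Callable[[bytes], Any],
) -> Callable[..., Any]:
    """Create the unary-unary callable for ``name`` once, then reuse it."""
    if name not in stubs:
        stubs[name] = channel.unary_unary(
            method_path,
            request_serializer=request_serializer,
            response_deserializer=response_deserializer,
        )
    return stubs[name]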
User-created entry groups can't contain the ``@`` - symbol, it is reserved for automatically created groups. - - Entry groups, like entries, can be searched. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.CreateEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateEntryGroup', - request_serializer=datacatalog.CreateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['create_entry_group'] - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an entry group. - - Returns: - Callable[[~.GetEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetEntryGroup', - request_serializer=datacatalog.GetEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an entry group. - - You must enable the Data Catalog API in the project identified - by the ``entry_group.name`` parameter. For more information, see - `Data Catalog resource - project `__. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntryGroup', - request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an entry group. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. 
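The entry-group stubs above are normally reached through the client's flattened parameters. A sketch of creating an entry group, with placeholder project, location, and ID values:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()
entry_group = client.create_entry_group(
    parent="projects/PROJECT_ID/locations/us-central1",
    entry_group_id="my_entry_group",
    entry_group=datacatalog_v1.EntryGroup(display_name="My entry group"),
)
print(entry_group.name)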
- - Returns: - Callable[[~.DeleteEntryGroupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup', - request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - datacatalog.ListEntryGroupsResponse]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists entry groups. - - Returns: - Callable[[~.ListEntryGroupsRequest], - ~.ListEntryGroupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListEntryGroups', - request_serializer=datacatalog.ListEntryGroupsRequest.serialize, - response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the create entry method over gRPC. - - Creates an entry. - - You can create entries only with 'FILESET', 'CLUSTER', - 'DATA_STREAM', or custom types. Data Catalog automatically - creates entries with other types during metadata ingestion from - integrated systems. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - An entry group can have a maximum of 100,000 entries. - - Returns: - Callable[[~.CreateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateEntry', - request_serializer=datacatalog.CreateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the update entry method over gRPC. - - Updates an existing entry. - - You must enable the Data Catalog API in the project identified - by the ``entry.name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.UpdateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntry', - request_serializer=datacatalog.UpdateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an existing entry. - - You can delete only the entries created by the - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] - method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteEntryRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry', - request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry'] - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the get entry method over gRPC. - - Gets an entry. - - Returns: - Callable[[~.GetEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetEntry', - request_serializer=datacatalog.GetEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the lookup entry method over gRPC. - - Gets an entry by its target resource name. - - The resource name comes from the source Google Cloud - Platform service. - - Returns: - Callable[[~.LookupEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/LookupEntry', - request_serializer=datacatalog.LookupEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - datacatalog.ListEntriesResponse]: - r"""Return a callable for the list entries method over gRPC. - - Lists entries. 
- - Note: Currently, this method can list only custom entries. To - get a list of both custom and automatically created entries, use - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Returns: - Callable[[~.ListEntriesRequest], - ~.ListEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListEntries', - request_serializer=datacatalog.ListEntriesRequest.serialize, - response_deserializer=datacatalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def modify_entry_overview(self) -> Callable[ - [datacatalog.ModifyEntryOverviewRequest], - datacatalog.EntryOverview]: - r"""Return a callable for the modify entry overview method over gRPC. - - Modifies entry overview, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateOverview`` IAM permission on the - corresponding project. - - Returns: - Callable[[~.ModifyEntryOverviewRequest], - ~.EntryOverview]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'modify_entry_overview' not in self._stubs: - self._stubs['modify_entry_overview'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryOverview', - request_serializer=datacatalog.ModifyEntryOverviewRequest.serialize, - response_deserializer=datacatalog.EntryOverview.deserialize, - ) - return self._stubs['modify_entry_overview'] - - @property - def modify_entry_contacts(self) -> Callable[ - [datacatalog.ModifyEntryContactsRequest], - datacatalog.Contacts]: - r"""Return a callable for the modify entry contacts method over gRPC. - - Modifies contacts, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateContacts`` IAM permission on the - corresponding project. - - Returns: - Callable[[~.ModifyEntryContactsRequest], - ~.Contacts]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'modify_entry_contacts' not in self._stubs: - self._stubs['modify_entry_contacts'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryContacts', - request_serializer=datacatalog.ModifyEntryContactsRequest.serialize, - response_deserializer=datacatalog.Contacts.deserialize, - ) - return self._stubs['modify_entry_contacts'] - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the create tag template method over gRPC. - - Creates a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. 
For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - Returns: - Callable[[~.CreateTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template' not in self._stubs: - self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplate', - request_serializer=datacatalog.CreateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['create_tag_template'] - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the get tag template method over gRPC. - - Gets a tag template. - - Returns: - Callable[[~.GetTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_tag_template' not in self._stubs: - self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetTagTemplate', - request_serializer=datacatalog.GetTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['get_tag_template'] - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the update tag template method over gRPC. - - Updates a tag template. - - You can't update template fields with this method. These fields - are separate resources with their own create, update, and delete - methods. - - You must enable the Data Catalog API in the project identified - by the ``tag_template.name`` parameter. For more information, - see `Data Catalog resource - project `__. - - Returns: - Callable[[~.UpdateTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template' not in self._stubs: - self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplate', - request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['update_tag_template'] - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag template method over gRPC. - - Deletes a tag template and all tags that use it. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteTagTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag_template' not in self._stubs: - self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate', - request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template'] - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the create tag template field method over gRPC. - - Creates a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.CreateTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template_field' not in self._stubs: - self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplateField', - request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['create_tag_template_field'] - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the update tag template field method over gRPC. - - Updates a field in a tag template. - - You can't update the field type with this method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.UpdateTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template_field' not in self._stubs: - self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplateField', - request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['update_tag_template_field'] - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the rename tag template field method over gRPC. - - Renames a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). 
- - Returns: - Callable[[~.RenameTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field' not in self._stubs: - self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateField', - request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field'] - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - tags.TagTemplateField]: - r"""Return a callable for the rename tag template field enum - value method over gRPC. - - Renames an enum value in a tag template. - - Within a single enum field, enum values must be unique. - - Returns: - Callable[[~.RenameTagTemplateFieldEnumValueRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field_enum_value' not in self._stubs: - self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue', - request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field_enum_value'] - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag template field method over gRPC. - - Deletes a field in a tag template and all uses of this field - from the tags based on this template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteTagTemplateFieldRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag_template_field' not in self._stubs: - self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField', - request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template_field'] - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - tags.Tag]: - r"""Return a callable for the create tag method over gRPC. - - Creates a tag and assigns it to: - - - An [Entry][google.cloud.datacatalog.v1.Entry] if the method - name is - ``projects.locations.entryGroups.entries.tags.create``. 
- - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the - method name is - ``projects.locations.entryGroups.tags.create``. - - Note: The project identified by the ``parent`` parameter for the - [tag] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) - and the [tag template] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) - used to create the tag must be in the same organization. - - Returns: - Callable[[~.CreateTagRequest], - ~.Tag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag' not in self._stubs: - self._stubs['create_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTag', - request_serializer=datacatalog.CreateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['create_tag'] - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - tags.Tag]: - r"""Return a callable for the update tag method over gRPC. - - Updates an existing tag. - - Returns: - Callable[[~.UpdateTagRequest], - ~.Tag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag' not in self._stubs: - self._stubs['update_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTag', - request_serializer=datacatalog.UpdateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['update_tag'] - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag method over gRPC. - - Deletes a tag. - - Returns: - Callable[[~.DeleteTagRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag' not in self._stubs: - self._stubs['delete_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTag', - request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag'] - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - datacatalog.ListTagsResponse]: - r"""Return a callable for the list tags method over gRPC. - - Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1.Entry]. The - [columns][google.cloud.datacatalog.v1.Tag.column] in the - response are lowercased. - - Returns: - Callable[[~.ListTagsRequest], - ~.ListTagsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
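For orientation, a sketch of attaching a tag built from an existing template to an entry; the resource names and the ``steward`` field id are placeholders, and the tag and its template must live in the same organization, as noted above::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Build a tag whose field keys match the template's field ids.
    tag = datacatalog_v1.Tag(
        template="projects/my-project/locations/us-central1/tagTemplates/my_template",
        fields={
            "steward": datacatalog_v1.TagField(string_value="data-team@example.com"),
        },
    )
    created_tag = client.create_tag(
        parent=(
            "projects/my-project/locations/us-central1"
            "/entryGroups/my_entry_group/entries/my_entry"
        ),
        tag=tag,
    )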
- if 'list_tags' not in self._stubs: - self._stubs['list_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListTags', - request_serializer=datacatalog.ListTagsRequest.serialize, - response_deserializer=datacatalog.ListTagsResponse.deserialize, - ) - return self._stubs['list_tags'] - - @property - def reconcile_tags(self) -> Callable[ - [datacatalog.ReconcileTagsRequest], - operations_pb2.Operation]: - r"""Return a callable for the reconcile tags method over gRPC. - - ``ReconcileTags`` creates or updates a list of tags on the - entry. If the - [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] - parameter is set, the operation deletes tags not included in the - input tag list. - - ``ReconcileTags`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return [ReconcileTagsMetadata] - [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a - [ReconcileTagsResponse] - [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. - - Returns: - Callable[[~.ReconcileTagsRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reconcile_tags' not in self._stubs: - self._stubs['reconcile_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ReconcileTags', - request_serializer=datacatalog.ReconcileTagsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['reconcile_tags'] - - @property - def star_entry(self) -> Callable[ - [datacatalog.StarEntryRequest], - datacatalog.StarEntryResponse]: - r"""Return a callable for the star entry method over gRPC. - - Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred - by the current user. Starring information is private to each - user. - - Returns: - Callable[[~.StarEntryRequest], - ~.StarEntryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'star_entry' not in self._stubs: - self._stubs['star_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/StarEntry', - request_serializer=datacatalog.StarEntryRequest.serialize, - response_deserializer=datacatalog.StarEntryResponse.deserialize, - ) - return self._stubs['star_entry'] - - @property - def unstar_entry(self) -> Callable[ - [datacatalog.UnstarEntryRequest], - datacatalog.UnstarEntryResponse]: - r"""Return a callable for the unstar entry method over gRPC. - - Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT - starred by the current user. Starring information is private to - each user. - - Returns: - Callable[[~.UnstarEntryRequest], - ~.UnstarEntryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
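Because ``ReconcileTags`` is a long-running operation, callers usually block on ``operation.result()`` to obtain the ``ReconcileTagsResponse``; a sketch with placeholder resource names::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Reconcile the entry's tags against the given template, deleting any
    # tags that are missing from the (empty) input tag list.
    request = datacatalog_v1.ReconcileTagsRequest(
        parent=(
            "projects/my-project/locations/us-central1"
            "/entryGroups/my_entry_group/entries/my_entry"
        ),
        tag_template=(
            "projects/my-project/locations/us-central1/tagTemplates/my_template"
        ),
        force_delete_missing=True,
    )
    operation = client.reconcile_tags(request=request)
    response = operation.result()
    print(response.created_tags_count, response.updated_tags_count)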
- if 'unstar_entry' not in self._stubs: - self._stubs['unstar_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UnstarEntry', - request_serializer=datacatalog.UnstarEntryRequest.serialize, - response_deserializer=datacatalog.UnstarEntryResponse.deserialize, - ) - return self._stubs['unstar_entry'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets an access control policy for a resource. Replaces any - existing policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method sets policies only within Data Catalog and - can't be used to manage policies in BigQuery, Pub/Sub, Dataproc - Metastore, and any external Google Cloud Platform resources - synced with the Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. - - May return: - - - A\ ``NOT_FOUND`` error if the resource doesn't exist or you - don't have the permission to view it. - - An empty policy if the resource exists but doesn't have a set - policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method doesn't get policies from Google Cloud - Platform resources ingested into Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. 
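A sketch of checking the caller's permissions on a tag template via the generated client; the resource name and permission strings are illustrative::

    from google.cloud import datacatalog_v1
    from google.iam.v1 import iam_policy_pb2

    client = datacatalog_v1.DataCatalogClient()

    # The response echoes back the subset of permissions the caller holds.
    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/my-project/locations/us-central1/tagTemplates/my_template",
        permissions=[
            "datacatalog.tagTemplates.get",
            "datacatalog.tagTemplates.update",
        ],
    )
    response = client.test_iam_permissions(request=request)
    print(list(response.permissions))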
- - Gets your permissions on a resource. - - Returns an empty set of permissions if the resource - doesn't exist. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method gets policies only within Data Catalog - and can't be used to get policies from BigQuery, - Pub/Sub, Dataproc Metastore, and any external Google - Cloud Platform resources ingested into Data Catalog. - - No Google IAM permissions are required to call this - method. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def import_entries(self) -> Callable[ - [datacatalog.ImportEntriesRequest], - operations_pb2.Operation]: - r"""Return a callable for the import entries method over gRPC. - - Imports entries from a source, such as data previously dumped - into a Cloud Storage bucket, into Data Catalog. Import of - entries is a sync operation that reconciles the state of the - third-party system with the Data Catalog. - - ``ImportEntries`` accepts source data snapshots of a third-party - system. Snapshot should be delivered as a .wire or - base65-encoded .txt file containing a sequence of Protocol - Buffer messages of - [DumpItem][google.cloud.datacatalog.v1.DumpItem] type. - - ``ImportEntries`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return - [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata] - and an - [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse] - message. - - Returns: - Callable[[~.ImportEntriesRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_entries' not in self._stubs: - self._stubs['import_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ImportEntries', - request_serializer=datacatalog.ImportEntriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_entries'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
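A sketch of kicking off ``ImportEntries`` from a Cloud Storage dump and waiting on the long-running operation; the bucket and entry-group names are placeholders::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # The dump must contain serialized DumpItem messages, as described above.
    request = datacatalog_v1.ImportEntriesRequest(
        parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
        gcs_bucket_path="gs://my-bucket",
    )
    operation = client.import_entries(request=request)
    response = operation.result()
    print(response.upserted_entries_count, response.deleted_entries_count)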
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataCatalogGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py deleted file mode 100644 index 83b1800e7811..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ /dev/null @@ -1,1468 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .grpc import DataCatalogGrpcTransport - - -class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport): - """gRPC AsyncIO backend transport for DataCatalog. - - Data Catalog API service allows you to discover, understand, - and manage your data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - Awaitable[datacatalog.SearchCatalogResponse]]: - r"""Return a callable for the search catalog method over gRPC. - - Searches Data Catalog for multiple resources like entries and - tags that match a query. - - This is a [Custom Method] - (https://cloud.google.com/apis/design/custom_methods) that - doesn't return all information on a resource, only its ID and - high level fields. To get more information, you can subsequently - call specific get methods. 
- - Note: Data Catalog search queries don't guarantee full recall. - Results that match your query might not be returned, even in - subsequent result pages. Additionally, returned (and not - returned) results can vary if you repeat search queries. - - For more information, see [Data Catalog search syntax] - (https://cloud.google.com/data-catalog/docs/how-to/search-reference). - - Returns: - Callable[[~.SearchCatalogRequest], - Awaitable[~.SearchCatalogResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_catalog' not in self._stubs: - self._stubs['search_catalog'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/SearchCatalog', - request_serializer=datacatalog.SearchCatalogRequest.serialize, - response_deserializer=datacatalog.SearchCatalogResponse.deserialize, - ) - return self._stubs['search_catalog'] - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the create entry group method over gRPC. - - Creates an entry group. - - An entry group contains logically related entries together with - `Cloud Identity and Access - Management `__ policies. These - policies specify users who can create, edit, and view entries - within entry groups. - - Data Catalog automatically creates entry groups with names that - start with the ``@`` symbol for the following resources: - - - BigQuery entries (``@bigquery``) - - Pub/Sub topics (``@pubsub``) - - Dataproc Metastore services - (``@dataproc_metastore_{SERVICE_NAME_HASH}``) - - You can create your own entry groups for Cloud Storage fileset - entries and custom entries together with the corresponding IAM - policies. User-created entry groups can't contain the ``@`` - symbol, it is reserved for automatically created groups. - - Entry groups, like entries, can be searched. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.CreateEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateEntryGroup', - request_serializer=datacatalog.CreateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['create_entry_group'] - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an entry group. - - Returns: - Callable[[~.GetEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetEntryGroup', - request_serializer=datacatalog.GetEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an entry group. - - You must enable the Data Catalog API in the project identified - by the ``entry_group.name`` parameter. For more information, see - `Data Catalog resource - project `__. - - Returns: - Callable[[~.UpdateEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntryGroup', - request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['update_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an entry group. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteEntryGroupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntryGroup', - request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - Awaitable[datacatalog.ListEntryGroupsResponse]]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists entry groups. - - Returns: - Callable[[~.ListEntryGroupsRequest], - Awaitable[~.ListEntryGroupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
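A sketch of paging through entry groups with the async client; the pager transparently issues follow-up ``ListEntryGroups`` calls, and the parent is a placeholder::

    import asyncio

    from google.cloud import datacatalog_v1


    async def list_groups() -> None:
        client = datacatalog_v1.DataCatalogAsyncClient()
        pager = await client.list_entry_groups(
            parent="projects/my-project/locations/us-central1",
        )
        async for entry_group in pager:
            print(entry_group.name)


    asyncio.run(list_groups())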
- if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListEntryGroups', - request_serializer=datacatalog.ListEntryGroupsRequest.serialize, - response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the create entry method over gRPC. - - Creates an entry. - - You can create entries only with 'FILESET', 'CLUSTER', - 'DATA_STREAM', or custom types. Data Catalog automatically - creates entries with other types during metadata ingestion from - integrated systems. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - An entry group can have a maximum of 100,000 entries. - - Returns: - Callable[[~.CreateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateEntry', - request_serializer=datacatalog.CreateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the update entry method over gRPC. - - Updates an existing entry. - - You must enable the Data Catalog API in the project identified - by the ``entry.name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.UpdateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateEntry', - request_serializer=datacatalog.UpdateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an existing entry. - - You can delete only the entries created by the - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry] - method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteEntryRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
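A sketch of creating a ``FILESET`` entry with the async client, in line with the type restriction described above; all resource names are placeholders::

    import asyncio

    from google.cloud import datacatalog_v1


    async def create_fileset_entry() -> None:
        client = datacatalog_v1.DataCatalogAsyncClient()
        # Only FILESET, CLUSTER, DATA_STREAM, or custom-typed entries can
        # be created directly through the API.
        entry = datacatalog_v1.Entry(
            display_name="Daily exports",
            type_=datacatalog_v1.EntryType.FILESET,
            gcs_fileset_spec=datacatalog_v1.GcsFilesetSpec(
                file_patterns=["gs://my-bucket/exports/*.csv"],
            ),
        )
        created = await client.create_entry(
            parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
            entry_id="daily_exports",
            entry=entry,
        )
        print(created.name)


    asyncio.run(create_fileset_entry())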
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteEntry', - request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry'] - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the get entry method over gRPC. - - Gets an entry. - - Returns: - Callable[[~.GetEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetEntry', - request_serializer=datacatalog.GetEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the lookup entry method over gRPC. - - Gets an entry by its target resource name. - - The resource name comes from the source Google Cloud - Platform service. - - Returns: - Callable[[~.LookupEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/LookupEntry', - request_serializer=datacatalog.LookupEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - Awaitable[datacatalog.ListEntriesResponse]]: - r"""Return a callable for the list entries method over gRPC. - - Lists entries. - - Note: Currently, this method can list only custom entries. To - get a list of both custom and automatically created entries, use - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Returns: - Callable[[~.ListEntriesRequest], - Awaitable[~.ListEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListEntries', - request_serializer=datacatalog.ListEntriesRequest.serialize, - response_deserializer=datacatalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def modify_entry_overview(self) -> Callable[ - [datacatalog.ModifyEntryOverviewRequest], - Awaitable[datacatalog.EntryOverview]]: - r"""Return a callable for the modify entry overview method over gRPC. - - Modifies entry overview, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateOverview`` IAM permission on the - corresponding project. - - Returns: - Callable[[~.ModifyEntryOverviewRequest], - Awaitable[~.EntryOverview]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'modify_entry_overview' not in self._stubs: - self._stubs['modify_entry_overview'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryOverview', - request_serializer=datacatalog.ModifyEntryOverviewRequest.serialize, - response_deserializer=datacatalog.EntryOverview.deserialize, - ) - return self._stubs['modify_entry_overview'] - - @property - def modify_entry_contacts(self) -> Callable[ - [datacatalog.ModifyEntryContactsRequest], - Awaitable[datacatalog.Contacts]]: - r"""Return a callable for the modify entry contacts method over gRPC. - - Modifies contacts, part of the business context of an - [Entry][google.cloud.datacatalog.v1.Entry]. - - To call this method, you must have the - ``datacatalog.entries.updateContacts`` IAM permission on the - corresponding project. - - Returns: - Callable[[~.ModifyEntryContactsRequest], - Awaitable[~.Contacts]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'modify_entry_contacts' not in self._stubs: - self._stubs['modify_entry_contacts'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ModifyEntryContacts', - request_serializer=datacatalog.ModifyEntryContactsRequest.serialize, - response_deserializer=datacatalog.Contacts.deserialize, - ) - return self._stubs['modify_entry_contacts'] - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the create tag template method over gRPC. - - Creates a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - Returns: - Callable[[~.CreateTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_tag_template' not in self._stubs: - self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplate', - request_serializer=datacatalog.CreateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['create_tag_template'] - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the get tag template method over gRPC. - - Gets a tag template. - - Returns: - Callable[[~.GetTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_tag_template' not in self._stubs: - self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetTagTemplate', - request_serializer=datacatalog.GetTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['get_tag_template'] - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the update tag template method over gRPC. - - Updates a tag template. - - You can't update template fields with this method. These fields - are separate resources with their own create, update, and delete - methods. - - You must enable the Data Catalog API in the project identified - by the ``tag_template.name`` parameter. For more information, - see `Data Catalog resource - project `__. - - Returns: - Callable[[~.UpdateTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template' not in self._stubs: - self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplate', - request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['update_tag_template'] - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag template method over gRPC. - - Deletes a tag template and all tags that use it. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteTagTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_tag_template' not in self._stubs: - self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplate', - request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template'] - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the create tag template field method over gRPC. - - Creates a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``parent`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.CreateTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template_field' not in self._stubs: - self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTagTemplateField', - request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['create_tag_template_field'] - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the update tag template field method over gRPC. - - Updates a field in a tag template. - - You can't update the field type with this method. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.UpdateTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template_field' not in self._stubs: - self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTagTemplateField', - request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['update_tag_template_field'] - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the rename tag template field method over gRPC. - - Renames a field in a tag template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see [Data - Catalog resource project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project). - - Returns: - Callable[[~.RenameTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field' not in self._stubs: - self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateField', - request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field'] - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the rename tag template field enum - value method over gRPC. - - Renames an enum value in a tag template. - - Within a single enum field, enum values must be unique. - - Returns: - Callable[[~.RenameTagTemplateFieldEnumValueRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field_enum_value' not in self._stubs: - self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/RenameTagTemplateFieldEnumValue', - request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field_enum_value'] - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag template field method over gRPC. - - Deletes a field in a tag template and all uses of this field - from the tags based on this template. - - You must enable the Data Catalog API in the project identified - by the ``name`` parameter. For more information, see `Data - Catalog resource - project `__. - - Returns: - Callable[[~.DeleteTagTemplateFieldRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag_template_field' not in self._stubs: - self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTagTemplateField', - request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template_field'] - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - Awaitable[tags.Tag]]: - r"""Return a callable for the create tag method over gRPC. - - Creates a tag and assigns it to: - - - An [Entry][google.cloud.datacatalog.v1.Entry] if the method - name is - ``projects.locations.entryGroups.entries.tags.create``. - - Or [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]if the - method name is - ``projects.locations.entryGroups.tags.create``. 
- - Note: The project identified by the ``parent`` parameter for the - [tag] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries.tags/create#path-parameters) - and the [tag template] - (https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.tagTemplates/create#path-parameters) - used to create the tag must be in the same organization. - - Returns: - Callable[[~.CreateTagRequest], - Awaitable[~.Tag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag' not in self._stubs: - self._stubs['create_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/CreateTag', - request_serializer=datacatalog.CreateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['create_tag'] - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - Awaitable[tags.Tag]]: - r"""Return a callable for the update tag method over gRPC. - - Updates an existing tag. - - Returns: - Callable[[~.UpdateTagRequest], - Awaitable[~.Tag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag' not in self._stubs: - self._stubs['update_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UpdateTag', - request_serializer=datacatalog.UpdateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['update_tag'] - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag method over gRPC. - - Deletes a tag. - - Returns: - Callable[[~.DeleteTagRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag' not in self._stubs: - self._stubs['delete_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/DeleteTag', - request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag'] - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - Awaitable[datacatalog.ListTagsResponse]]: - r"""Return a callable for the list tags method over gRPC. - - Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1.Entry]. The - [columns][google.cloud.datacatalog.v1.Tag.column] in the - response are lowercased. - - Returns: - Callable[[~.ListTagsRequest], - Awaitable[~.ListTagsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_tags' not in self._stubs: - self._stubs['list_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ListTags', - request_serializer=datacatalog.ListTagsRequest.serialize, - response_deserializer=datacatalog.ListTagsResponse.deserialize, - ) - return self._stubs['list_tags'] - - @property - def reconcile_tags(self) -> Callable[ - [datacatalog.ReconcileTagsRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the reconcile tags method over gRPC. - - ``ReconcileTags`` creates or updates a list of tags on the - entry. If the - [ReconcileTagsRequest.force_delete_missing][google.cloud.datacatalog.v1.ReconcileTagsRequest.force_delete_missing] - parameter is set, the operation deletes tags not included in the - input tag list. - - ``ReconcileTags`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return [ReconcileTagsMetadata] - [google.cloud.datacatalog.v1.ReconcileTagsMetadata] and a - [ReconcileTagsResponse] - [google.cloud.datacatalog.v1.ReconcileTagsResponse] message. - - Returns: - Callable[[~.ReconcileTagsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reconcile_tags' not in self._stubs: - self._stubs['reconcile_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ReconcileTags', - request_serializer=datacatalog.ReconcileTagsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['reconcile_tags'] - - @property - def star_entry(self) -> Callable[ - [datacatalog.StarEntryRequest], - Awaitable[datacatalog.StarEntryResponse]]: - r"""Return a callable for the star entry method over gRPC. - - Marks an [Entry][google.cloud.datacatalog.v1.Entry] as starred - by the current user. Starring information is private to each - user. - - Returns: - Callable[[~.StarEntryRequest], - Awaitable[~.StarEntryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'star_entry' not in self._stubs: - self._stubs['star_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/StarEntry', - request_serializer=datacatalog.StarEntryRequest.serialize, - response_deserializer=datacatalog.StarEntryResponse.deserialize, - ) - return self._stubs['star_entry'] - - @property - def unstar_entry(self) -> Callable[ - [datacatalog.UnstarEntryRequest], - Awaitable[datacatalog.UnstarEntryResponse]]: - r"""Return a callable for the unstar entry method over gRPC. - - Marks an [Entry][google.cloud.datacatalog.v1.Entry] as NOT - starred by the current user. Starring information is private to - each user. - - Returns: - Callable[[~.UnstarEntryRequest], - Awaitable[~.UnstarEntryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'unstar_entry' not in self._stubs: - self._stubs['unstar_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/UnstarEntry', - request_serializer=datacatalog.UnstarEntryRequest.serialize, - response_deserializer=datacatalog.UnstarEntryResponse.deserialize, - ) - return self._stubs['unstar_entry'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets an access control policy for a resource. Replaces any - existing policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method sets policies only within Data Catalog and - can't be used to manage policies in BigQuery, Pub/Sub, Dataproc - Metastore, and any external Google Cloud Platform resources - synced with the Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. - - May return: - - - A\ ``NOT_FOUND`` error if the resource doesn't exist or you - don't have the permission to view it. - - An empty policy if the resource exists but doesn't have a set - policy. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method doesn't get policies from Google Cloud - Platform resources ingested into Data Catalog. - - To call this method, you must have the following Google IAM - permissions: - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
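The ``get_iam_policy`` stub registration continues just below. As a hedged usage sketch of the IAM methods documented above (not one of the generated samples; the tag template resource name is a placeholder and Application Default Credentials are assumed):

from google.cloud import datacatalog_v1
from google.iam.v1 import iam_policy_pb2

# Hedged sketch: reads the IAM policy set on a tag template. Per the docstring
# above, the caller needs datacatalog.tagTemplates.getIamPolicy on the template.
client = datacatalog_v1.DataCatalogClient()  # assumes Application Default Credentials
policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(
        resource="projects/my-project/locations/us-central1/tagTemplates/my_template",  # placeholder
    )
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))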
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Gets your permissions on a resource. - - Returns an empty set of permissions if the resource - doesn't exist. - - Supported resources are: - - - Tag templates - - Entry groups - - Note: This method gets policies only within Data Catalog - and can't be used to get policies from BigQuery, - Pub/Sub, Dataproc Metastore, and any external Google - Cloud Platform resources ingested into Data Catalog. - - No Google IAM permissions are required to call this - method. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - @property - def import_entries(self) -> Callable[ - [datacatalog.ImportEntriesRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the import entries method over gRPC. - - Imports entries from a source, such as data previously dumped - into a Cloud Storage bucket, into Data Catalog. Import of - entries is a sync operation that reconciles the state of the - third-party system with the Data Catalog. - - ``ImportEntries`` accepts source data snapshots of a third-party - system. Snapshot should be delivered as a .wire or - base65-encoded .txt file containing a sequence of Protocol - Buffer messages of - [DumpItem][google.cloud.datacatalog.v1.DumpItem] type. - - ``ImportEntries`` returns a [long-running operation] - [google.longrunning.Operation] resource that can be queried with - [Operations.GetOperation][google.longrunning.Operations.GetOperation] - to return - [ImportEntriesMetadata][google.cloud.datacatalog.v1.ImportEntriesMetadata] - and an - [ImportEntriesResponse][google.cloud.datacatalog.v1.ImportEntriesResponse] - message. - - Returns: - Callable[[~.ImportEntriesRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
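The ``import_entries`` stub registration continues just below. A minimal, hedged sketch of the import flow described in the docstring above: the call returns a long-running operation whose result resolves to an ``ImportEntriesResponse``. The resource names, the bucket path, and the ``gcs_bucket_path`` source field are assumptions for illustration, not taken from the deleted file; Application Default Credentials are assumed.

import asyncio

from google.cloud import datacatalog_v1


async def import_entries_sketch():
    # Hedged sketch: kick off ImportEntries and wait for the long-running
    # operation to complete.
    client = datacatalog_v1.DataCatalogAsyncClient()
    request = datacatalog_v1.ImportEntriesRequest(
        parent="projects/my-project/locations/us/entryGroups/my-group",  # placeholder
        gcs_bucket_path="gs://my-bucket/dump",  # assumed source field and location
    )
    operation = await client.import_entries(request=request)
    response = await operation.result()  # resolves to ImportEntriesResponse
    print(response)


asyncio.run(import_entries_sketch())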
- if 'import_entries' not in self._stubs: - self._stubs['import_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.DataCatalog/ImportEntries', - request_serializer=datacatalog.ImportEntriesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_entries'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'DataCatalogGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py deleted file mode 100644 index cde5f3a05abb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import PolicyTagManagerClient -from .async_client import PolicyTagManagerAsyncClient - -__all__ = ( - 'PolicyTagManagerClient', - 'PolicyTagManagerAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py deleted file mode 100644 index 8c2dfaf97b90..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/async_client.py +++ /dev/null @@ -1,1819 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport -from .client import PolicyTagManagerClient - - -class PolicyTagManagerAsyncClient: - """Policy Tag Manager API service allows you to manage your - policy tags and taxonomies. - - Policy tags are used to tag BigQuery columns and apply - additional access control policies. A taxonomy is a hierarchical - grouping of policy tags that classify data along a common axis. - """ - - _client: PolicyTagManagerClient - - DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT - - policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) - parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) - taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) - parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) - common_billing_account_path = staticmethod(PolicyTagManagerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PolicyTagManagerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) - parse_common_folder_path = staticmethod(PolicyTagManagerClient.parse_common_folder_path) - common_organization_path = staticmethod(PolicyTagManagerClient.common_organization_path) - parse_common_organization_path = staticmethod(PolicyTagManagerClient.parse_common_organization_path) - common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) - parse_common_project_path = staticmethod(PolicyTagManagerClient.parse_common_project_path) - common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) - parse_common_location_path = staticmethod(PolicyTagManagerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerAsyncClient: The constructed client. 
- """ - return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerAsyncClient: The constructed client. - """ - return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PolicyTagManagerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PolicyTagManagerTransport: - """Returns the transport used by the client instance. - - Returns: - PolicyTagManagerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.PolicyTagManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. 
It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = PolicyTagManagerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_taxonomy(self, - request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Creates a taxonomy in a specified project. - - The taxonomy is initially empty, that is, it doesn't - contain policy tags. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreateTaxonomyRequest, dict]]): - The request object. Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. - parent (:class:`str`): - Required. Resource name of the - project that the taxonomy will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): - The taxonomy to create. - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
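The ``create_taxonomy`` docstring's Returns section continues below. As a hedged sketch of the flattened ``parent``/``taxonomy`` arguments described in the Args above (not a generated sample; the project and location in the resource name are placeholders and Application Default Credentials are assumed):

import asyncio

from google.cloud import datacatalog_v1


async def create_taxonomy_sketch():
    client = datacatalog_v1.PolicyTagManagerAsyncClient()
    taxonomy = datacatalog_v1.Taxonomy(
        display_name="Data sensitivity",
        activated_policy_types=[
            datacatalog_v1.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL
        ],
    )
    # Flattened-argument form; passing a full CreateTaxonomyRequest instead is
    # equivalent, but the two styles must not be mixed.
    created = await client.create_taxonomy(
        parent="projects/my-project/locations/us",  # placeholder resource name
        taxonomy=taxonomy,
    )
    print(created.name)


asyncio.run(create_taxonomy_sketch())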
- - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, taxonomy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.CreateTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_taxonomy(self, - request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a taxonomy, including all policy tags in this - taxonomy, their associated policies, and the policy tags - references from BigQuery columns. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - await client.delete_taxonomy(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest, dict]]): - The request object. Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. - name (:class:`str`): - Required. Resource name of the - taxonomy to delete. - Note: All policy tags in this taxonomy - are also deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.DeleteTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_taxonomy(self, - request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, - *, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Updates a taxonomy, including its display name, - description, and activated policy types. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTaxonomyRequest( - ) - - # Make the request - response = await client.update_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest, dict]]): - The request object. Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. - taxonomy (:class:`google.cloud.datacatalog_v1.types.Taxonomy`): - The taxonomy to update. You can - update only its description, display - name, and activated policy types. - - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
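The ``update_taxonomy`` docstring's Returns section continues below. The deleted method bodies above all attach a routing header built from the request's resource name before sending the call; a small hedged illustration of that step (the taxonomy name is a placeholder):

from google.api_core import gapic_v1

# Yields the ("x-goog-request-params", "name=...") pair that the deleted
# methods append to the outgoing call metadata so the backend can route the
# request by resource name.
routing_header = gapic_v1.routing_header.to_grpc_metadata(
    (("name", "projects/my-project/locations/us/taxonomies/12345"),)
)
metadata = (routing_header,)
print(metadata)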
- - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([taxonomy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.UpdateTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("taxonomy.name", request.taxonomy.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_taxonomies(self, - request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTaxonomiesAsyncPager: - r"""Lists all taxonomies in a project in a particular - location that you have a permission to view. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ListTaxonomiesRequest, dict]]): - The request object. Request message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. - parent (:class:`str`): - Required. Resource name of the - project to list the taxonomies of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager: - Response message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.ListTaxonomiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTaxonomiesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_taxonomy(self, - request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Gets a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.GetTaxonomyRequest, dict]]): - The request object. Request message for - [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. - name (:class:`str`): - Required. Resource name of the - taxonomy to get. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.GetTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_policy_tag(self, - request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Creates a policy tag in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.CreatePolicyTagRequest, dict]]): - The request object. 
Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. - parent (:class:`str`): - Required. Resource name of the - taxonomy that the policy tag will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): - The policy tag to create. - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy_tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.CreatePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_policy_tag(self, - request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a policy tag together with the following: - - - All of its descendant policy tags, if any - - Policies associated with the policy tag and its descendants - - References from BigQuery table schema of the policy tag and - its descendants - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_policy_tag(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.DeletePolicyTagRequest, dict]]): - The request object. Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. - name (:class:`str`): - Required. Resource name of the policy - tag to delete. - Note: All of its descendant policy tags - are also deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.DeletePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_policy_tag(self, - request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, - *, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Updates a policy tag, including its display - name, description, and parent policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdatePolicyTagRequest( - ) - - # Make the request - response = await client.update_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest, dict]]): - The request object. Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. - policy_tag (:class:`google.cloud.datacatalog_v1.types.PolicyTag`): - The policy tag to update. You can - update only its description, display - name, and parent policy tag fields. - - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([policy_tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.UpdatePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("policy_tag.name", request.policy_tag.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_policy_tags(self, - request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPolicyTagsAsyncPager: - r"""Lists all policy tags in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ListPolicyTagsRequest, dict]]): - The request object. Request message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. - parent (:class:`str`): - Required. Resource name of the - taxonomy to list the policy tags of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager: - Response message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.ListPolicyTagsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_policy_tags, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPolicyTagsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_policy_tag(self, - request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Gets a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = await client.get_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.GetPolicyTagRequest, dict]]): - The request object. Request message for - [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. - name (:class:`str`): - Required. Resource name of the policy - tag. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.GetPolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a policy tag or a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for a policy tag or a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns your permissions on a specified policy tag or - taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "PolicyTagManagerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py deleted file mode 100644 index a7efe8b14108..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ /dev/null @@ -1,2029 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PolicyTagManagerGrpcTransport -from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport - - -class PolicyTagManagerClientMeta(type): - """Metaclass for the PolicyTagManager client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[PolicyTagManagerTransport]]
-    _transport_registry["grpc"] = PolicyTagManagerGrpcTransport
-    _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[PolicyTagManagerTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta):
-    """Policy Tag Manager API service allows you to manage your
-    policy tags and taxonomies.
-
-    Policy tags are used to tag BigQuery columns and apply
-    additional access control policies. A taxonomy is a hierarchical
-    grouping of policy tags that classify data along a common axis.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            PolicyTagManagerClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            PolicyTagManagerClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> PolicyTagManagerTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            PolicyTagManagerTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def policy_tag_path(project: str,location: str,taxonomy: str,policy_tag: str,) -> str:
-        """Returns a fully-qualified policy_tag string."""
-        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, )
-
-    @staticmethod
-    def parse_policy_tag_path(path: str) -> Dict[str,str]:
-        """Parses a policy_tag path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)/policyTags/(?P<policy_tag>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def taxonomy_path(project: str,location: str,taxonomy: str,) -> str:
-        """Returns a fully-qualified taxonomy string."""
-        return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, )
-
-    @staticmethod
-    def parse_taxonomy_path(path: str) -> Dict[str,str]:
-        """Parses a taxonomy path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse a organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return
the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PolicyTagManagerTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, PolicyTagManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, PolicyTagManagerTransport): - # transport is a PolicyTagManagerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_taxonomy(self, - request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Creates a taxonomy in a specified project. 
- - The taxonomy is initially empty, that is, it doesn't - contain policy tags. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreateTaxonomyRequest, dict]): - The request object. Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. - parent (str): - Required. Resource name of the - project that the taxonomy will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): - The taxonomy to create. - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, taxonomy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.CreateTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.CreateTaxonomyRequest): - request = policytagmanager.CreateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_taxonomy(self, - request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a taxonomy, including all policy tags in this - taxonomy, their associated policies, and the policy tags - references from BigQuery columns. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - client.delete_taxonomy(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest, dict]): - The request object. Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. - name (str): - Required. Resource name of the - taxonomy to delete. - Note: All policy tags in this taxonomy - are also deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.DeleteTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): - request = policytagmanager.DeleteTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_taxonomy(self, - request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, - *, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Updates a taxonomy, including its display name, - description, and activated policy types. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTaxonomyRequest( - ) - - # Make the request - response = client.update_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest, dict]): - The request object. Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. - taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): - The taxonomy to update. You can - update only its description, display - name, and activated policy types. - - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([taxonomy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.UpdateTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): - request = policytagmanager.UpdateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("taxonomy.name", request.taxonomy.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_taxonomies(self, - request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTaxonomiesPager: - r"""Lists all taxonomies in a project in a particular - location that you have a permission to view. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ListTaxonomiesRequest, dict]): - The request object. Request message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. - parent (str): - Required. Resource name of the - project to list the taxonomies of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager: - Response message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.ListTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, policytagmanager.ListTaxonomiesRequest): - request = policytagmanager.ListTaxonomiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTaxonomiesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_taxonomy(self, - request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Gets a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = client.get_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.GetTaxonomyRequest, dict]): - The request object. Request message for - [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. - name (str): - Required. Resource name of the - taxonomy to get. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.GetTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.GetTaxonomyRequest): - request = policytagmanager.GetTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_policy_tag(self, - request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Creates a policy tag in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.CreatePolicyTagRequest, dict]): - The request object. Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. - parent (str): - Required. Resource name of the - taxonomy that the policy tag will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): - The policy tag to create. - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. 
- - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy_tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.CreatePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.CreatePolicyTagRequest): - request = policytagmanager.CreatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_policy_tag(self, - request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a policy tag together with the following: - - - All of its descendant policy tags, if any - - Policies associated with the policy tag and its descendants - - References from BigQuery table schema of the policy tag and - its descendants - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - client.delete_policy_tag(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1.types.DeletePolicyTagRequest, dict]): - The request object. Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. - name (str): - Required. Resource name of the policy - tag to delete. - Note: All of its descendant policy tags - are also deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.DeletePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.DeletePolicyTagRequest): - request = policytagmanager.DeletePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_policy_tag(self, - request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, - *, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Updates a policy tag, including its display - name, description, and parent policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdatePolicyTagRequest( - ) - - # Make the request - response = client.update_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest, dict]): - The request object. Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. - policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): - The policy tag to update. You can - update only its description, display - name, and parent policy tag fields. - - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([policy_tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.UpdatePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): - request = policytagmanager.UpdatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("policy_tag.name", request.policy_tag.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_policy_tags(self, - request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPolicyTagsPager: - r"""Lists all policy tags in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ListPolicyTagsRequest, dict]): - The request object. Request message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. - parent (str): - Required. Resource name of the - taxonomy to list the policy tags of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager: - Response message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.ListPolicyTagsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.ListPolicyTagsRequest): - request = policytagmanager.ListPolicyTagsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListPolicyTagsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_policy_tag(self, - request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Gets a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = client.get_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.GetPolicyTagRequest, dict]): - The request object. 
Request message for - [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. - name (str): - Required. Resource name of the policy - tag. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.PolicyTag: - Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For - example: - - :literal:`\` + Geolocation + LatLong + City + ZipCode`\ \` - - Where the "Geolocation" policy tag contains three - children. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.GetPolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.GetPolicyTagRequest): - request = policytagmanager.GetPolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a policy tag or a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for a policy tag or a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns your permissions on a specified policy tag or - taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. 
Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PolicyTagManagerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
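        # Note: like delete_taxonomy and delete_policy_tag above, this helper
        # returns None; the wrapped rpc below is invoked only for its side
        # effect, and no response object is captured.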
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py deleted file mode 100644 index f5323f720e10..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/pagers.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.datacatalog_v1.types import policytagmanager - - -class ListTaxonomiesPager: - """A pager for iterating through ``list_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``taxonomies`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListTaxonomies`` requests and continue to iterate - through the ``taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., policytagmanager.ListTaxonomiesResponse], - request: policytagmanager.ListTaxonomiesRequest, - response: policytagmanager.ListTaxonomiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = policytagmanager.ListTaxonomiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[policytagmanager.ListTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[policytagmanager.Taxonomy]: - for page in self.pages: - yield from page.taxonomies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTaxonomiesAsyncPager: - """A pager for iterating through ``list_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``taxonomies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTaxonomies`` requests and continue to iterate - through the ``taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]], - request: policytagmanager.ListTaxonomiesRequest, - response: policytagmanager.ListTaxonomiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListTaxonomiesRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListTaxonomiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = policytagmanager.ListTaxonomiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[policytagmanager.ListTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[policytagmanager.Taxonomy]: - async def async_generator(): - async for page in self.pages: - for response in page.taxonomies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPolicyTagsPager: - """A pager for iterating through ``list_policy_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``policy_tags`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListPolicyTags`` requests and continue to iterate - through the ``policy_tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., policytagmanager.ListPolicyTagsResponse], - request: policytagmanager.ListPolicyTagsRequest, - response: policytagmanager.ListPolicyTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = policytagmanager.ListPolicyTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[policytagmanager.ListPolicyTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[policytagmanager.PolicyTag]: - for page in self.pages: - yield from page.policy_tags - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPolicyTagsAsyncPager: - """A pager for iterating through ``list_policy_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``policy_tags`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListPolicyTags`` requests and continue to iterate - through the ``policy_tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1.types.ListPolicyTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], - request: policytagmanager.ListPolicyTagsRequest, - response: policytagmanager.ListPolicyTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1.types.ListPolicyTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1.types.ListPolicyTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = policytagmanager.ListPolicyTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[policytagmanager.ListPolicyTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[policytagmanager.PolicyTag]: - async def async_generator(): - async for page in self.pages: - for response in page.policy_tags: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py deleted file mode 100644 index 192f3e97b958..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PolicyTagManagerTransport -from .grpc import PolicyTagManagerGrpcTransport -from .grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport - - -# Compile a registry of transports. 
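# The registry that follows maps a transport label to its class so that a
# client can be constructed with ``transport="grpc"`` or
# ``transport="grpc_asyncio"``. A rough sketch of such a lookup (the helper
# name here is hypothetical, not part of this module):
#
#     def _pick_transport(label: str = "grpc") -> Type[PolicyTagManagerTransport]:
#         return _transport_registry[label]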
-_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] -_transport_registry['grpc'] = PolicyTagManagerGrpcTransport -_transport_registry['grpc_asyncio'] = PolicyTagManagerGrpcAsyncIOTransport - -__all__ = ( - 'PolicyTagManagerTransport', - 'PolicyTagManagerGrpcTransport', - 'PolicyTagManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py deleted file mode 100644 index 7c841038bf6f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/base.py +++ /dev/null @@ -1,356 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PolicyTagManagerTransport(abc.ABC): - """Abstract transport class for PolicyTagManager.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
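The registry deleted above maps the strings "grpc" and "grpc_asyncio" to the concrete transport classes. A short sketch of how that name-based selection is typically exercised through the public client; credentials are again assumed to come from the environment.

    from google.cloud import datacatalog_v1

    # The "grpc" string is resolved to PolicyTagManagerGrpcTransport before the
    # client is constructed; the instance is then exposed via client.transport.
    client = datacatalog_v1.PolicyTagManagerClient(transport="grpc")
    print(type(client.transport).__name__)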
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
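As a standalone illustration of the credential precedence implemented in the deleted `__init__` above (explicit credentials, then a credentials file, then Application Default Credentials). `resolve_credentials` and `CLOUD_PLATFORM_SCOPE` are illustrative names introduced here, not part of the library.

    import google.auth

    CLOUD_PLATFORM_SCOPE = ("https://www.googleapis.com/auth/cloud-platform",)

    def resolve_credentials(credentials=None, credentials_file=None):
        # Same precedence as the transport base class: the two sources are
        # mutually exclusive, and ADC is the fallback when neither is given.
        if credentials and credentials_file:
            raise ValueError("'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, scopes=CLOUD_PLATFORM_SCOPE
            )
        elif credentials is None:
            credentials, _ = google.auth.default(scopes=CLOUD_PLATFORM_SCOPE)
        return credentials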
- self._wrapped_methods = { - self.create_taxonomy: gapic_v1.method.wrap_method( - self.create_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_taxonomy: gapic_v1.method.wrap_method( - self.delete_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_taxonomy: gapic_v1.method.wrap_method( - self.update_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_taxonomies: gapic_v1.method.wrap_method( - self.list_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_taxonomy: gapic_v1.method.wrap_method( - self.get_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_policy_tag: gapic_v1.method.wrap_method( - self.create_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.delete_policy_tag: gapic_v1.method.wrap_method( - self.delete_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.update_policy_tag: gapic_v1.method.wrap_method( - self.update_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.list_policy_tags: gapic_v1.method.wrap_method( - self.list_policy_tags, - default_timeout=None, - client_info=client_info, - ), - self.get_policy_tag: gapic_v1.method.wrap_method( - self.get_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - Union[ - policytagmanager.ListTaxonomiesResponse, - Awaitable[policytagmanager.ListTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - Union[ - policytagmanager.ListPolicyTagsResponse, - Awaitable[policytagmanager.ListPolicyTagsResponse] - ]]: - raise NotImplementedError() - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() 
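The `_prep_wrapped_messages` logic above relies on `gapic_v1.method.wrap_method` to layer retry, timeout, and user-agent metadata handling onto each bare transport callable. A rough sketch of that mechanism in isolation; `list_taxonomies` here is only a stand-in stub, and the retry and timeout values are arbitrary.

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    def list_taxonomies(request, **kwargs):
        # Stand-in for a transport stub; a real stub would send the RPC and
        # would receive the timeout/metadata kwargs injected by the wrapper.
        return {"request": request, "kwargs": kwargs}

    wrapped = gapic_v1.method.wrap_method(
        list_taxonomies,
        default_retry=retries.Retry(initial=0.1, maximum=60.0, multiplier=1.3),
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )
    # Defaults apply automatically; callers can still override retry/timeout per call.
    response = wrapped(request={"parent": "projects/my-project/locations/us"})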
- - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PolicyTagManagerTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py deleted file mode 100644 index 7e126ee7d7c2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc.py +++ /dev/null @@ -1,671 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO - - -class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): - """gRPC backend transport for PolicyTagManager. - - Policy Tag Manager API service allows you to manage your - policy tags and taxonomies. - - Policy tags are used to tag BigQuery columns and apply - additional access control policies. A taxonomy is a hierarchical - grouping of policy tags that classify data along a common axis. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the create taxonomy method over gRPC. - - Creates a taxonomy in a specified project. 
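Tying the channel plumbing above together: a sketch of building a caller-owned channel with `create_channel()` and handing it to the transport and client. Application Default Credentials are assumed; when a channel is passed explicitly, the transport ignores any credentials arguments, as noted in the deleted `__init__`.

    from google.cloud import datacatalog_v1
    from google.cloud.datacatalog_v1.services.policy_tag_manager.transports import (
        PolicyTagManagerGrpcTransport,
    )

    # create_channel() applies the cloud-platform scope and the default host
    # defined on the transport class.
    channel = PolicyTagManagerGrpcTransport.create_channel("datacatalog.googleapis.com")
    transport = PolicyTagManagerGrpcTransport(channel=channel)
    client = datacatalog_v1.PolicyTagManagerClient(transport=transport)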
- - The taxonomy is initially empty, that is, it doesn't - contain policy tags. - - Returns: - Callable[[~.CreateTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_taxonomy' not in self._stubs: - self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy', - request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['create_taxonomy'] - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete taxonomy method over gRPC. - - Deletes a taxonomy, including all policy tags in this - taxonomy, their associated policies, and the policy tags - references from BigQuery columns. - - Returns: - Callable[[~.DeleteTaxonomyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_taxonomy' not in self._stubs: - self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy', - request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_taxonomy'] - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the update taxonomy method over gRPC. - - Updates a taxonomy, including its display name, - description, and activated policy types. - - Returns: - Callable[[~.UpdateTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_taxonomy' not in self._stubs: - self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy', - request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['update_taxonomy'] - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - policytagmanager.ListTaxonomiesResponse]: - r"""Return a callable for the list taxonomies method over gRPC. - - Lists all taxonomies in a project in a particular - location that you have a permission to view. - - Returns: - Callable[[~.ListTaxonomiesRequest], - ~.ListTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_taxonomies' not in self._stubs: - self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies', - request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, - response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, - ) - return self._stubs['list_taxonomies'] - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the get taxonomy method over gRPC. - - Gets a taxonomy. - - Returns: - Callable[[~.GetTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_taxonomy' not in self._stubs: - self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy', - request_serializer=policytagmanager.GetTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['get_taxonomy'] - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the create policy tag method over gRPC. - - Creates a policy tag in a taxonomy. - - Returns: - Callable[[~.CreatePolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_policy_tag' not in self._stubs: - self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag', - request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['create_policy_tag'] - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete policy tag method over gRPC. - - Deletes a policy tag together with the following: - - - All of its descendant policy tags, if any - - Policies associated with the policy tag and its descendants - - References from BigQuery table schema of the policy tag and - its descendants - - Returns: - Callable[[~.DeletePolicyTagRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_policy_tag' not in self._stubs: - self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag', - request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_policy_tag'] - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the update policy tag method over gRPC. 
- - Updates a policy tag, including its display - name, description, and parent policy tag. - - Returns: - Callable[[~.UpdatePolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_policy_tag' not in self._stubs: - self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag', - request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['update_policy_tag'] - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - policytagmanager.ListPolicyTagsResponse]: - r"""Return a callable for the list policy tags method over gRPC. - - Lists all policy tags in a taxonomy. - - Returns: - Callable[[~.ListPolicyTagsRequest], - ~.ListPolicyTagsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_policy_tags' not in self._stubs: - self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags', - request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, - response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, - ) - return self._stubs['list_policy_tags'] - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the get policy tag method over gRPC. - - Gets a policy tag. - - Returns: - Callable[[~.GetPolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_policy_tag' not in self._stubs: - self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag', - request_serializer=policytagmanager.GetPolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['get_policy_tag'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for a policy tag or a taxonomy. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for a policy tag or a taxonomy. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns your permissions on a specified policy tag or - taxonomy. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
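The IAM stubs above are normally reached through the client surface rather than called directly. A short sketch, with a placeholder resource name and Application Default Credentials assumed, of fetching the policy on a taxonomy.

    from google.cloud import datacatalog_v1
    from google.iam.v1 import iam_policy_pb2

    client = datacatalog_v1.PolicyTagManagerClient()
    # GetIamPolicy takes the taxonomy (or policy tag) resource name, not a parent.
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(
            resource="projects/my-project/locations/us/taxonomies/12345",
        )
    )
    for binding in policy.bindings:
        print(binding.role, list(binding.members))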
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'PolicyTagManagerGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py deleted file mode 100644 index 800006698a8e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager/transports/grpc_asyncio.py +++ /dev/null @@ -1,670 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .grpc import PolicyTagManagerGrpcTransport - - -class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): - """gRPC AsyncIO backend transport for PolicyTagManager. - - Policy Tag Manager API service allows you to manage your - policy tags and taxonomies. - - Policy tags are used to tag BigQuery columns and apply - additional access control policies. A taxonomy is a hierarchical - grouping of policy tags that classify data along a common axis. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the create taxonomy method over gRPC. - - Creates a taxonomy in a specified project. - - The taxonomy is initially empty, that is, it doesn't - contain policy tags. - - Returns: - Callable[[~.CreateTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
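For completeness, the AsyncIO transport above backs PolicyTagManagerAsyncClient. A minimal sketch of the async call path, mirroring the synchronous pager example earlier; the parent value is a placeholder and Application Default Credentials are assumed.

    import asyncio

    from google.cloud import datacatalog_v1

    async def main():
        client = datacatalog_v1.PolicyTagManagerAsyncClient()
        # Awaiting the call returns a ListPolicyTagsAsyncPager; async iteration
        # fetches further pages on demand, as in the pagers module above.
        pager = await client.list_policy_tags(
            parent="projects/my-project/locations/us/taxonomies/12345"
        )
        async for policy_tag in pager:
            print(policy_tag.display_name)

    asyncio.run(main())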
- if 'create_taxonomy' not in self._stubs: - self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/CreateTaxonomy', - request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['create_taxonomy'] - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete taxonomy method over gRPC. - - Deletes a taxonomy, including all policy tags in this - taxonomy, their associated policies, and the policy tags - references from BigQuery columns. - - Returns: - Callable[[~.DeleteTaxonomyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_taxonomy' not in self._stubs: - self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/DeleteTaxonomy', - request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_taxonomy'] - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the update taxonomy method over gRPC. - - Updates a taxonomy, including its display name, - description, and activated policy types. - - Returns: - Callable[[~.UpdateTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_taxonomy' not in self._stubs: - self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/UpdateTaxonomy', - request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['update_taxonomy'] - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - Awaitable[policytagmanager.ListTaxonomiesResponse]]: - r"""Return a callable for the list taxonomies method over gRPC. - - Lists all taxonomies in a project in a particular - location that you have a permission to view. - - Returns: - Callable[[~.ListTaxonomiesRequest], - Awaitable[~.ListTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_taxonomies' not in self._stubs: - self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/ListTaxonomies', - request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, - response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, - ) - return self._stubs['list_taxonomies'] - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the get taxonomy method over gRPC. - - Gets a taxonomy. - - Returns: - Callable[[~.GetTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_taxonomy' not in self._stubs: - self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetTaxonomy', - request_serializer=policytagmanager.GetTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['get_taxonomy'] - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the create policy tag method over gRPC. - - Creates a policy tag in a taxonomy. - - Returns: - Callable[[~.CreatePolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_policy_tag' not in self._stubs: - self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/CreatePolicyTag', - request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['create_policy_tag'] - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete policy tag method over gRPC. - - Deletes a policy tag together with the following: - - - All of its descendant policy tags, if any - - Policies associated with the policy tag and its descendants - - References from BigQuery table schema of the policy tag and - its descendants - - Returns: - Callable[[~.DeletePolicyTagRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_policy_tag' not in self._stubs: - self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/DeletePolicyTag', - request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_policy_tag'] - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the update policy tag method over gRPC. - - Updates a policy tag, including its display - name, description, and parent policy tag. - - Returns: - Callable[[~.UpdatePolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_policy_tag' not in self._stubs: - self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/UpdatePolicyTag', - request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['update_policy_tag'] - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - Awaitable[policytagmanager.ListPolicyTagsResponse]]: - r"""Return a callable for the list policy tags method over gRPC. - - Lists all policy tags in a taxonomy. - - Returns: - Callable[[~.ListPolicyTagsRequest], - Awaitable[~.ListPolicyTagsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_policy_tags' not in self._stubs: - self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/ListPolicyTags', - request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, - response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, - ) - return self._stubs['list_policy_tags'] - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the get policy tag method over gRPC. - - Gets a policy tag. - - Returns: - Callable[[~.GetPolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_policy_tag' not in self._stubs: - self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetPolicyTag', - request_serializer=policytagmanager.GetPolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['get_policy_tag'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for a policy tag or a taxonomy. 
- - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for a policy tag or a taxonomy. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns your permissions on a specified policy tag or - taxonomy. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManager/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'PolicyTagManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py deleted file mode 100644 index 0592b8ffb549..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import PolicyTagManagerSerializationClient -from .async_client import PolicyTagManagerSerializationAsyncClient - -__all__ = ( - 'PolicyTagManagerSerializationClient', - 'PolicyTagManagerSerializationAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py deleted file mode 100644 index c610f7072457..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/async_client.py +++ /dev/null @@ -1,699 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport -from .client import PolicyTagManagerSerializationClient - - -class PolicyTagManagerSerializationAsyncClient: - """Policy Tag Manager Serialization API service allows you to - manipulate your policy tags and taxonomies in a serialized - format. - - Taxonomy is a hierarchical group of policy tags. 
- """ - - _client: PolicyTagManagerSerializationClient - - DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT - - taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) - parse_taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.parse_taxonomy_path) - common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PolicyTagManagerSerializationClient.common_folder_path) - parse_common_folder_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_folder_path) - common_organization_path = staticmethod(PolicyTagManagerSerializationClient.common_organization_path) - parse_common_organization_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_organization_path) - common_project_path = staticmethod(PolicyTagManagerSerializationClient.common_project_path) - parse_common_project_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_project_path) - common_location_path = staticmethod(PolicyTagManagerSerializationClient.common_location_path) - parse_common_location_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationAsyncClient: The constructed client. - """ - return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationAsyncClient: The constructed client. - """ - return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PolicyTagManagerSerializationClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PolicyTagManagerSerializationTransport: - """Returns the transport used by the client instance. - - Returns: - PolicyTagManagerSerializationTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(PolicyTagManagerSerializationClient).get_transport_class, type(PolicyTagManagerSerializationClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager serialization client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = PolicyTagManagerSerializationClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def replace_taxonomy(self, - request: Optional[Union[policytagmanagerserialization.ReplaceTaxonomyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Replaces (updates) a taxonomy and all its policy tags. - - The taxonomy and its entire hierarchy of policy tags must be - represented literally by ``SerializedTaxonomy`` and the nested - ``SerializedPolicyTag`` messages. - - This operation automatically does the following: - - - Deletes the existing policy tags that are missing from the - ``SerializedPolicyTag``. - - Creates policy tags that don't have resource names. They are - considered new. - - Updates policy tags with valid resources names accordingly. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_replace_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() - serialized_taxonomy.display_name = "display_name_value" - - request = datacatalog_v1.ReplaceTaxonomyRequest( - name="name_value", - serialized_taxonomy=serialized_taxonomy, - ) - - # Make the request - response = await client.replace_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest, dict]]): - The request object. Request message for - [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ReplaceTaxonomyRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.replace_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def import_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ImportTaxonomiesResponse: - r"""Creates new taxonomies (including their policy tags) - in a given project by importing from inlined or - cross-regional sources. - - For a cross-regional source, new taxonomies are created - by copying from a source in another region. - - For an inlined source, taxonomies and policy tags are - created in bulk using nested protocol buffer structures. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = await client.import_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest, dict]]): - The request object. Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: - Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ImportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def export_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ExportTaxonomiesResponse: - r"""Exports taxonomies in the requested type and returns them, - including their policy tags. The requested taxonomies must - belong to the same project. - - This method generates ``SerializedTaxonomy`` protocol buffers - with nested policy tags that can be used as input for - ``ImportTaxonomies`` calls. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - async def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = await client.export_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest, dict]]): - The request object. Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: - Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ExportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. 
Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. 
Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def __aenter__(self) -> "PolicyTagManagerSerializationAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerSerializationAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py deleted file mode 100644 index 05e50ba07d8b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ /dev/null @@ -1,906 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PolicyTagManagerSerializationGrpcTransport -from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport - - -class PolicyTagManagerSerializationClientMeta(type): - """Metaclass for the PolicyTagManagerSerialization client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] - _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport - _transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[PolicyTagManagerSerializationTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PolicyTagManagerSerializationClient(metaclass=PolicyTagManagerSerializationClientMeta): - """Policy Tag Manager Serialization API service allows you to - manipulate your policy tags and taxonomies in a serialized - format. - - Taxonomy is a hierarchical group of policy tags. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "datacatalog.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PolicyTagManagerSerializationTransport: - """Returns the transport used by the client instance. 
- - Returns: - PolicyTagManagerSerializationTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def taxonomy_path(project: str,location: str,taxonomy: str,) -> str: - """Returns a fully-qualified taxonomy string.""" - return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - - @staticmethod - def parse_taxonomy_path(path: str) -> Dict[str,str]: - """Parses a taxonomy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PolicyTagManagerSerializationTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager serialization client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, PolicyTagManagerSerializationTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, PolicyTagManagerSerializationTransport): - # transport is a PolicyTagManagerSerializationTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def replace_taxonomy(self, - request: Optional[Union[policytagmanagerserialization.ReplaceTaxonomyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Replaces (updates) a taxonomy and all its policy tags. - - The taxonomy and its entire hierarchy of policy tags must be - represented literally by ``SerializedTaxonomy`` and the nested - ``SerializedPolicyTag`` messages. - - This operation automatically does the following: - - - Deletes the existing policy tags that are missing from the - ``SerializedPolicyTag``. - - Creates policy tags that don't have resource names. They are - considered new. - - Updates policy tags with valid resources names accordingly. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_replace_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() - serialized_taxonomy.display_name = "display_name_value" - - request = datacatalog_v1.ReplaceTaxonomyRequest( - name="name_value", - serialized_taxonomy=serialized_taxonomy, - ) - - # Make the request - response = client.replace_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest, dict]): - The request object. Request message for - [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.Taxonomy: - A taxonomy is a collection of hierarchical policy tags that classify data - along a common axis. - - For example, a "data sensitivity" taxonomy might - contain the following policy tags: - - :literal:`\` + PII + Account number + Age + SSN + Zipcode + Financials + Revenue`\ \` - - A "data origin" taxonomy might contain the following - policy tags: - - :literal:`\` + User data + Employee data + Partner data + Public data`\ \` - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanagerserialization.ReplaceTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanagerserialization.ReplaceTaxonomyRequest): - request = policytagmanagerserialization.ReplaceTaxonomyRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.replace_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def import_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ImportTaxonomiesResponse: - r"""Creates new taxonomies (including their policy tags) - in a given project by importing from inlined or - cross-regional sources. - - For a cross-regional source, new taxonomies are created - by copying from a source in another region. 
- - For an inlined source, taxonomies and policy tags are - created in bulk using nested protocol buffer structures. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = client.import_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest, dict]): - The request object. Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse: - Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanagerserialization.ImportTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanagerserialization.ImportTaxonomiesRequest): - request = policytagmanagerserialization.ImportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def export_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ExportTaxonomiesResponse: - r"""Exports taxonomies in the requested type and returns them, - including their policy tags. The requested taxonomies must - belong to the same project. - - This method generates ``SerializedTaxonomy`` protocol buffers - with nested policy tags that can be used as input for - ``ImportTaxonomies`` calls. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1 - - def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - request = datacatalog_v1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = client.export_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest, dict]): - The request object. Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse: - Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanagerserialization.ExportTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanagerserialization.ExportTaxonomiesRequest): - request = policytagmanagerserialization.ExportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PolicyTagManagerSerializationClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
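# A minimal round-trip sketch (not part of this patch): the ExportTaxonomies and
# ImportTaxonomies methods documented above are commonly chained, feeding the
# exported SerializedTaxonomy messages back in as an inline source.
# Assumptions: the released google-cloud-datacatalog package, application
# default credentials, and placeholder parent/taxonomy resource names.
from typing import Sequence

from google.cloud import datacatalog_v1


def copy_taxonomies(source_parent: str, target_parent: str, taxonomy_names: Sequence[str]) -> None:
    client = datacatalog_v1.PolicyTagManagerSerializationClient()

    # Export returns SerializedTaxonomy messages with their nested policy tags.
    export_response = client.export_taxonomies(
        request=datacatalog_v1.ExportTaxonomiesRequest(
            parent=source_parent,
            taxonomies=list(taxonomy_names),
            serialized_taxonomies=True,
        )
    )

    # Re-import the same serialized form in bulk under another parent.
    import_response = client.import_taxonomies(
        request=datacatalog_v1.ImportTaxonomiesRequest(
            parent=target_parent,
            inline_source=datacatalog_v1.InlineSource(
                taxonomies=list(export_response.taxonomies),
            ),
        )
    )
    for taxonomy in import_response.taxonomies:
        print(taxonomy.name)


# Example call (placeholder names only):
# copy_taxonomies(
#     "projects/src-project/locations/us",
#     "projects/dst-project/locations/us",
#     ["projects/src-project/locations/us/taxonomies/123"],
# )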
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerSerializationClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py deleted file mode 100644 index faf2990e5837..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PolicyTagManagerSerializationTransport -from .grpc import PolicyTagManagerSerializationGrpcTransport -from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] -_transport_registry['grpc'] = PolicyTagManagerSerializationGrpcTransport -_transport_registry['grpc_asyncio'] = PolicyTagManagerSerializationGrpcAsyncIOTransport - -__all__ = ( - 'PolicyTagManagerSerializationTransport', - 'PolicyTagManagerSerializationGrpcTransport', - 'PolicyTagManagerSerializationGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py deleted file mode 100644 index 4c384efe5e4d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/base.py +++ /dev/null @@ -1,216 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PolicyTagManagerSerializationTransport(abc.ABC): - """Abstract transport class for PolicyTagManagerSerialization.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.replace_taxonomy: gapic_v1.method.wrap_method( - self.replace_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.import_taxonomies: gapic_v1.method.wrap_method( - self.import_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.export_taxonomies: gapic_v1.method.wrap_method( - self.export_taxonomies, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def replace_taxonomy(self) -> Callable[ - [policytagmanagerserialization.ReplaceTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - Union[ - policytagmanagerserialization.ImportTaxonomiesResponse, - Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - Union[ - policytagmanagerserialization.ExportTaxonomiesResponse, - Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PolicyTagManagerSerializationTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py deleted file mode 100644 index ca57a7325676..000000000000 --- 
a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc.py +++ /dev/null @@ -1,422 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO - - -class PolicyTagManagerSerializationGrpcTransport(PolicyTagManagerSerializationTransport): - """gRPC backend transport for PolicyTagManagerSerialization. - - Policy Tag Manager Serialization API service allows you to - manipulate your policy tags and taxonomies in a serialized - format. - - Taxonomy is a hierarchical group of policy tags. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def replace_taxonomy(self) -> Callable[ - [policytagmanagerserialization.ReplaceTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the replace taxonomy method over gRPC. - - Replaces (updates) a taxonomy and all its policy tags. 
- - The taxonomy and its entire hierarchy of policy tags must be - represented literally by ``SerializedTaxonomy`` and the nested - ``SerializedPolicyTag`` messages. - - This operation automatically does the following: - - - Deletes the existing policy tags that are missing from the - ``SerializedPolicyTag``. - - Creates policy tags that don't have resource names. They are - considered new. - - Updates policy tags with valid resources names accordingly. - - Returns: - Callable[[~.ReplaceTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'replace_taxonomy' not in self._stubs: - self._stubs['replace_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ReplaceTaxonomy', - request_serializer=policytagmanagerserialization.ReplaceTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['replace_taxonomy'] - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - policytagmanagerserialization.ImportTaxonomiesResponse]: - r"""Return a callable for the import taxonomies method over gRPC. - - Creates new taxonomies (including their policy tags) - in a given project by importing from inlined or - cross-regional sources. - - For a cross-regional source, new taxonomies are created - by copying from a source in another region. - - For an inlined source, taxonomies and policy tags are - created in bulk using nested protocol buffer structures. - - Returns: - Callable[[~.ImportTaxonomiesRequest], - ~.ImportTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_taxonomies' not in self._stubs: - self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies', - request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, - ) - return self._stubs['import_taxonomies'] - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - policytagmanagerserialization.ExportTaxonomiesResponse]: - r"""Return a callable for the export taxonomies method over gRPC. - - Exports taxonomies in the requested type and returns them, - including their policy tags. The requested taxonomies must - belong to the same project. - - This method generates ``SerializedTaxonomy`` protocol buffers - with nested policy tags that can be used as input for - ``ImportTaxonomies`` calls. - - Returns: - Callable[[~.ExportTaxonomiesRequest], - ~.ExportTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'export_taxonomies' not in self._stubs: - self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies', - request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, - ) - return self._stubs['export_taxonomies'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'PolicyTagManagerSerializationGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py deleted file mode 100644 index 310cb52405b7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ /dev/null @@ -1,421 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .grpc import PolicyTagManagerSerializationGrpcTransport - - -class PolicyTagManagerSerializationGrpcAsyncIOTransport(PolicyTagManagerSerializationTransport): - """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. - - Policy Tag Manager Serialization API service allows you to - manipulate your policy tags and taxonomies in a serialized - format. - - Taxonomy is a hierarchical group of policy tags. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
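# Hedged usage sketch (not part of the generated module): the AsyncIO transport
# described above is normally reached through the async client, which selects
# "grpc_asyncio" by default. Resource names below are placeholders.
import asyncio

from google.cloud import datacatalog_v1


async def export_taxonomies_async(parent: str, taxonomy_name: str) -> None:
    client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient()
    response = await client.export_taxonomies(
        request=datacatalog_v1.ExportTaxonomiesRequest(
            parent=parent,
            taxonomies=[taxonomy_name],
            serialized_taxonomies=True,
        )
    )
    for taxonomy in response.taxonomies:
        print(taxonomy.display_name)


# Example call (placeholder names only):
# asyncio.run(export_taxonomies_async(
#     "projects/my-project/locations/us",
#     "projects/my-project/locations/us/taxonomies/123",
# ))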
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def replace_taxonomy(self) -> Callable[ - [policytagmanagerserialization.ReplaceTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the replace taxonomy method over gRPC. - - Replaces (updates) a taxonomy and all its policy tags. - - The taxonomy and its entire hierarchy of policy tags must be - represented literally by ``SerializedTaxonomy`` and the nested - ``SerializedPolicyTag`` messages. - - This operation automatically does the following: - - - Deletes the existing policy tags that are missing from the - ``SerializedPolicyTag``. - - Creates policy tags that don't have resource names. They are - considered new. - - Updates policy tags with valid resources names accordingly. - - Returns: - Callable[[~.ReplaceTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'replace_taxonomy' not in self._stubs: - self._stubs['replace_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ReplaceTaxonomy', - request_serializer=policytagmanagerserialization.ReplaceTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['replace_taxonomy'] - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse]]: - r"""Return a callable for the import taxonomies method over gRPC. - - Creates new taxonomies (including their policy tags) - in a given project by importing from inlined or - cross-regional sources. - - For a cross-regional source, new taxonomies are created - by copying from a source in another region. - - For an inlined source, taxonomies and policy tags are - created in bulk using nested protocol buffer structures. - - Returns: - Callable[[~.ImportTaxonomiesRequest], - Awaitable[~.ImportTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_taxonomies' not in self._stubs: - self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ImportTaxonomies', - request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, - ) - return self._stubs['import_taxonomies'] - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse]]: - r"""Return a callable for the export taxonomies method over gRPC. 
- - Exports taxonomies in the requested type and returns them, - including their policy tags. The requested taxonomies must - belong to the same project. - - This method generates ``SerializedTaxonomy`` protocol buffers - with nested policy tags that can be used as input for - ``ImportTaxonomies`` calls. - - Returns: - Callable[[~.ExportTaxonomiesRequest], - Awaitable[~.ExportTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_taxonomies' not in self._stubs: - self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1.PolicyTagManagerSerialization/ExportTaxonomies', - request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, - ) - return self._stubs['export_taxonomies'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'PolicyTagManagerSerializationGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py deleted file mode 100644 index c44baf372a05..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/__init__.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .bigquery import ( - BigQueryConnectionSpec, - BigQueryRoutineSpec, - CloudSqlBigQueryConnectionSpec, -) -from .common import ( - PersonalDetails, - IntegratedSystem, - ManagingSystem, -) -from .data_source import ( - DataSource, - StorageProperties, -) -from .datacatalog import ( - BusinessContext, - CloudBigtableInstanceSpec, - CloudBigtableSystemSpec, - Contacts, - CreateEntryGroupRequest, - CreateEntryRequest, - CreateTagRequest, - CreateTagTemplateFieldRequest, - CreateTagTemplateRequest, - DatabaseTableSpec, - DatasetSpec, - DataSourceConnectionSpec, - DeleteEntryGroupRequest, - DeleteEntryRequest, - DeleteTagRequest, - DeleteTagTemplateFieldRequest, - DeleteTagTemplateRequest, - Entry, - EntryGroup, - EntryOverview, - FilesetSpec, - GetEntryGroupRequest, - GetEntryRequest, - GetTagTemplateRequest, - ImportEntriesMetadata, - ImportEntriesRequest, - ImportEntriesResponse, - ListEntriesRequest, - ListEntriesResponse, - ListEntryGroupsRequest, - ListEntryGroupsResponse, - ListTagsRequest, - ListTagsResponse, - LookerSystemSpec, - LookupEntryRequest, - ModelSpec, - ModifyEntryContactsRequest, - ModifyEntryOverviewRequest, - ReconcileTagsMetadata, - ReconcileTagsRequest, - ReconcileTagsResponse, - RenameTagTemplateFieldEnumValueRequest, - RenameTagTemplateFieldRequest, - RoutineSpec, - SearchCatalogRequest, - SearchCatalogResponse, - ServiceSpec, - SqlDatabaseSystemSpec, - StarEntryRequest, - StarEntryResponse, - UnstarEntryRequest, - UnstarEntryResponse, - UpdateEntryGroupRequest, - UpdateEntryRequest, - UpdateTagRequest, - UpdateTagTemplateFieldRequest, - UpdateTagTemplateRequest, - VertexDatasetSpec, - VertexModelSourceInfo, - VertexModelSpec, - EntryType, -) -from .dataplex_spec import ( - DataplexExternalTable, - DataplexFilesetSpec, - DataplexSpec, - DataplexTableSpec, -) -from .dump_content import ( - DumpItem, - TaggedEntry, -) -from .gcs_fileset_spec import ( - GcsFilesetSpec, - GcsFileSpec, -) -from .physical_schema import ( - PhysicalSchema, -) -from .policytagmanager 
import ( - CreatePolicyTagRequest, - CreateTaxonomyRequest, - DeletePolicyTagRequest, - DeleteTaxonomyRequest, - GetPolicyTagRequest, - GetTaxonomyRequest, - ListPolicyTagsRequest, - ListPolicyTagsResponse, - ListTaxonomiesRequest, - ListTaxonomiesResponse, - PolicyTag, - Taxonomy, - UpdatePolicyTagRequest, - UpdateTaxonomyRequest, -) -from .policytagmanagerserialization import ( - CrossRegionalSource, - ExportTaxonomiesRequest, - ExportTaxonomiesResponse, - ImportTaxonomiesRequest, - ImportTaxonomiesResponse, - InlineSource, - ReplaceTaxonomyRequest, - SerializedPolicyTag, - SerializedTaxonomy, -) -from .schema import ( - ColumnSchema, - Schema, -) -from .search import ( - SearchCatalogResult, - SearchResultType, -) -from .table_spec import ( - BigQueryDateShardedSpec, - BigQueryTableSpec, - TableSpec, - ViewSpec, - TableSourceType, -) -from .tags import ( - FieldType, - Tag, - TagField, - TagTemplate, - TagTemplateField, -) -from .timestamps import ( - SystemTimestamps, -) -from .usage import ( - CommonUsageStats, - UsageSignal, - UsageStats, -) - -__all__ = ( - 'BigQueryConnectionSpec', - 'BigQueryRoutineSpec', - 'CloudSqlBigQueryConnectionSpec', - 'PersonalDetails', - 'IntegratedSystem', - 'ManagingSystem', - 'DataSource', - 'StorageProperties', - 'BusinessContext', - 'CloudBigtableInstanceSpec', - 'CloudBigtableSystemSpec', - 'Contacts', - 'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateTagRequest', - 'CreateTagTemplateFieldRequest', - 'CreateTagTemplateRequest', - 'DatabaseTableSpec', - 'DatasetSpec', - 'DataSourceConnectionSpec', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteTagRequest', - 'DeleteTagTemplateFieldRequest', - 'DeleteTagTemplateRequest', - 'Entry', - 'EntryGroup', - 'EntryOverview', - 'FilesetSpec', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetTagTemplateRequest', - 'ImportEntriesMetadata', - 'ImportEntriesRequest', - 'ImportEntriesResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListTagsRequest', - 'ListTagsResponse', - 'LookerSystemSpec', - 'LookupEntryRequest', - 'ModelSpec', - 'ModifyEntryContactsRequest', - 'ModifyEntryOverviewRequest', - 'ReconcileTagsMetadata', - 'ReconcileTagsRequest', - 'ReconcileTagsResponse', - 'RenameTagTemplateFieldEnumValueRequest', - 'RenameTagTemplateFieldRequest', - 'RoutineSpec', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'ServiceSpec', - 'SqlDatabaseSystemSpec', - 'StarEntryRequest', - 'StarEntryResponse', - 'UnstarEntryRequest', - 'UnstarEntryResponse', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateTagRequest', - 'UpdateTagTemplateFieldRequest', - 'UpdateTagTemplateRequest', - 'VertexDatasetSpec', - 'VertexModelSourceInfo', - 'VertexModelSpec', - 'EntryType', - 'DataplexExternalTable', - 'DataplexFilesetSpec', - 'DataplexSpec', - 'DataplexTableSpec', - 'DumpItem', - 'TaggedEntry', - 'GcsFilesetSpec', - 'GcsFileSpec', - 'PhysicalSchema', - 'CreatePolicyTagRequest', - 'CreateTaxonomyRequest', - 'DeletePolicyTagRequest', - 'DeleteTaxonomyRequest', - 'GetPolicyTagRequest', - 'GetTaxonomyRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'PolicyTag', - 'Taxonomy', - 'UpdatePolicyTagRequest', - 'UpdateTaxonomyRequest', - 'CrossRegionalSource', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - 'ImportTaxonomiesRequest', - 'ImportTaxonomiesResponse', - 'InlineSource', - 'ReplaceTaxonomyRequest', - 'SerializedPolicyTag', - 
'SerializedTaxonomy', - 'ColumnSchema', - 'Schema', - 'SearchCatalogResult', - 'SearchResultType', - 'BigQueryDateShardedSpec', - 'BigQueryTableSpec', - 'TableSpec', - 'ViewSpec', - 'TableSourceType', - 'FieldType', - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'SystemTimestamps', - 'CommonUsageStats', - 'UsageSignal', - 'UsageStats', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py deleted file mode 100644 index c6276afea7f5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/bigquery.py +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'BigQueryConnectionSpec', - 'CloudSqlBigQueryConnectionSpec', - 'BigQueryRoutineSpec', - }, -) - - -class BigQueryConnectionSpec(proto.Message): - r"""Specification for the BigQuery connection. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - connection_type (google.cloud.datacatalog_v1.types.BigQueryConnectionSpec.ConnectionType): - The type of the BigQuery connection. - cloud_sql (google.cloud.datacatalog_v1.types.CloudSqlBigQueryConnectionSpec): - Specification for the BigQuery connection to - a Cloud SQL instance. - - This field is a member of `oneof`_ ``connection_spec``. - has_credential (bool): - True if there are credentials attached to the - BigQuery connection; false otherwise. - """ - class ConnectionType(proto.Enum): - r"""The type of the BigQuery connection. - - Values: - CONNECTION_TYPE_UNSPECIFIED (0): - Unspecified type. - CLOUD_SQL (1): - Cloud SQL connection. - """ - CONNECTION_TYPE_UNSPECIFIED = 0 - CLOUD_SQL = 1 - - connection_type: ConnectionType = proto.Field( - proto.ENUM, - number=1, - enum=ConnectionType, - ) - cloud_sql: 'CloudSqlBigQueryConnectionSpec' = proto.Field( - proto.MESSAGE, - number=2, - oneof='connection_spec', - message='CloudSqlBigQueryConnectionSpec', - ) - has_credential: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class CloudSqlBigQueryConnectionSpec(proto.Message): - r"""Specification for the BigQuery connection to a Cloud SQL - instance. - - Attributes: - instance_id (str): - Cloud SQL instance ID in the format of - ``project:location:instance``. - database (str): - Database name. - type_ (google.cloud.datacatalog_v1.types.CloudSqlBigQueryConnectionSpec.DatabaseType): - Type of the Cloud SQL database. - """ - class DatabaseType(proto.Enum): - r"""Supported Cloud SQL database types. - - Values: - DATABASE_TYPE_UNSPECIFIED (0): - Unspecified database type. - POSTGRES (1): - Cloud SQL for PostgreSQL. 
- MYSQL (2): - Cloud SQL for MySQL. - """ - DATABASE_TYPE_UNSPECIFIED = 0 - POSTGRES = 1 - MYSQL = 2 - - instance_id: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - type_: DatabaseType = proto.Field( - proto.ENUM, - number=3, - enum=DatabaseType, - ) - - -class BigQueryRoutineSpec(proto.Message): - r"""Fields specific for BigQuery routines. - - Attributes: - imported_libraries (MutableSequence[str]): - Paths of the imported libraries. - """ - - imported_libraries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py deleted file mode 100644 index cd5c33035aa8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/common.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'IntegratedSystem', - 'ManagingSystem', - 'PersonalDetails', - }, -) - - -class IntegratedSystem(proto.Enum): - r"""This enum lists all the systems that Data Catalog integrates - with. - - Values: - INTEGRATED_SYSTEM_UNSPECIFIED (0): - Default unknown system. - BIGQUERY (1): - BigQuery. - CLOUD_PUBSUB (2): - Cloud Pub/Sub. - DATAPROC_METASTORE (3): - Dataproc Metastore. - DATAPLEX (4): - Dataplex. - CLOUD_SPANNER (6): - Cloud Spanner - CLOUD_BIGTABLE (7): - Cloud Bigtable - CLOUD_SQL (8): - Cloud Sql - LOOKER (9): - Looker - VERTEX_AI (10): - Vertex AI - """ - INTEGRATED_SYSTEM_UNSPECIFIED = 0 - BIGQUERY = 1 - CLOUD_PUBSUB = 2 - DATAPROC_METASTORE = 3 - DATAPLEX = 4 - CLOUD_SPANNER = 6 - CLOUD_BIGTABLE = 7 - CLOUD_SQL = 8 - LOOKER = 9 - VERTEX_AI = 10 - - -class ManagingSystem(proto.Enum): - r"""This enum describes all the systems that manage - Taxonomy and PolicyTag resources in DataCatalog. - - Values: - MANAGING_SYSTEM_UNSPECIFIED (0): - Default value - MANAGING_SYSTEM_DATAPLEX (1): - Dataplex. - MANAGING_SYSTEM_OTHER (2): - Other - """ - MANAGING_SYSTEM_UNSPECIFIED = 0 - MANAGING_SYSTEM_DATAPLEX = 1 - MANAGING_SYSTEM_OTHER = 2 - - -class PersonalDetails(proto.Message): - r"""Entry metadata relevant only to the user and private to them. - - Attributes: - starred (bool): - True if the entry is starred by the user; - false otherwise. - star_time (google.protobuf.timestamp_pb2.Timestamp): - Set if the entry is starred; unset otherwise. 
- """ - - starred: bool = proto.Field( - proto.BOOL, - number=1, - ) - star_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py deleted file mode 100644 index 2d497ec1370c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/data_source.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'DataSource', - 'StorageProperties', - }, -) - - -class DataSource(proto.Message): - r"""Physical location of an entry. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - service (google.cloud.datacatalog_v1.types.DataSource.Service): - Service that physically stores the data. - resource (str): - Full name of a resource as defined by the service. For - example: - - ``//bigquery.googleapis.com/projects/{PROJECT_ID}/locations/{LOCATION}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` - source_entry (str): - Output only. Data Catalog entry name, if - applicable. - storage_properties (google.cloud.datacatalog_v1.types.StorageProperties): - Detailed properties of the underlying - storage. - - This field is a member of `oneof`_ ``properties``. - """ - class Service(proto.Enum): - r"""Name of a service that stores the data. - - Values: - SERVICE_UNSPECIFIED (0): - Default unknown service. - CLOUD_STORAGE (1): - Google Cloud Storage service. - BIGQUERY (2): - BigQuery service. - """ - SERVICE_UNSPECIFIED = 0 - CLOUD_STORAGE = 1 - BIGQUERY = 2 - - service: Service = proto.Field( - proto.ENUM, - number=1, - enum=Service, - ) - resource: str = proto.Field( - proto.STRING, - number=2, - ) - source_entry: str = proto.Field( - proto.STRING, - number=3, - ) - storage_properties: 'StorageProperties' = proto.Field( - proto.MESSAGE, - number=4, - oneof='properties', - message='StorageProperties', - ) - - -class StorageProperties(proto.Message): - r"""Details the properties of the underlying storage. - - Attributes: - file_pattern (MutableSequence[str]): - Patterns to identify a set of files for this fileset. 
- - Examples of a valid ``file_pattern``: - - - ``gs://bucket_name/dir/*``: matches all files in the - ``bucket_name/dir`` directory - - ``gs://bucket_name/dir/**``: matches all files in the - ``bucket_name/dir`` and all subdirectories recursively - - ``gs://bucket_name/file*``: matches files prefixed by - ``file`` in ``bucket_name`` - - ``gs://bucket_name/??.txt``: matches files with two - characters followed by ``.txt`` in ``bucket_name`` - - ``gs://bucket_name/[aeiou].txt``: matches files that - contain a single vowel character followed by ``.txt`` in - ``bucket_name`` - - ``gs://bucket_name/[a-m].txt``: matches files that - contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` - in ``bucket_name`` - - ``gs://bucket_name/a/*/b``: matches all files in - ``bucket_name`` that match the ``a/*/b`` pattern, such as - ``a/c/b``, ``a/d/b`` - - ``gs://another_bucket/a.txt``: matches - ``gs://another_bucket/a.txt`` - file_type (str): - File type in MIME format, for example, ``text/plain``. - """ - - file_pattern: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - file_type: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py deleted file mode 100644 index 831fc14fc2fd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/datacatalog.py +++ /dev/null @@ -1,2727 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
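For orientation, a minimal sketch of the ``file_pattern`` globs documented above for ``StorageProperties``. The bucket name, paths, and MIME type are placeholders, and in practice ``DataSource``/``StorageProperties`` are populated by the service on an entry rather than hand-written by callers:

    from google.cloud import datacatalog_v1

    # Purely illustrative values; normally output-only on an Entry's data_source.
    props = datacatalog_v1.StorageProperties(
        file_pattern=[
            "gs://example_bucket/logs/*",   # direct children of logs/
            "gs://example_bucket/logs/**",  # logs/ and all subdirectories
        ],
        file_type="text/plain",
    )
    source = datacatalog_v1.DataSource(
        service=datacatalog_v1.DataSource.Service.CLOUD_STORAGE,
        resource="//storage.googleapis.com/projects/_/buckets/example_bucket",
        storage_properties=props,
    )
    print(list(source.storage_properties.file_pattern))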
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import bigquery -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import data_source as gcd_data_source -from google.cloud.datacatalog_v1.types import dataplex_spec -from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec -from google.cloud.datacatalog_v1.types import schema as gcd_schema -from google.cloud.datacatalog_v1.types import search -from google.cloud.datacatalog_v1.types import table_spec -from google.cloud.datacatalog_v1.types import tags as gcd_tags -from google.cloud.datacatalog_v1.types import timestamps -from google.cloud.datacatalog_v1.types import usage -from google.protobuf import field_mask_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'EntryType', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'CreateEntryGroupRequest', - 'UpdateEntryGroupRequest', - 'GetEntryGroupRequest', - 'DeleteEntryGroupRequest', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'CreateEntryRequest', - 'UpdateEntryRequest', - 'DeleteEntryRequest', - 'GetEntryRequest', - 'LookupEntryRequest', - 'Entry', - 'DatabaseTableSpec', - 'FilesetSpec', - 'DataSourceConnectionSpec', - 'RoutineSpec', - 'DatasetSpec', - 'SqlDatabaseSystemSpec', - 'LookerSystemSpec', - 'CloudBigtableSystemSpec', - 'CloudBigtableInstanceSpec', - 'ServiceSpec', - 'VertexModelSourceInfo', - 'VertexModelSpec', - 'VertexDatasetSpec', - 'ModelSpec', - 'BusinessContext', - 'EntryOverview', - 'Contacts', - 'EntryGroup', - 'CreateTagTemplateRequest', - 'GetTagTemplateRequest', - 'UpdateTagTemplateRequest', - 'DeleteTagTemplateRequest', - 'CreateTagRequest', - 'UpdateTagRequest', - 'DeleteTagRequest', - 'CreateTagTemplateFieldRequest', - 'UpdateTagTemplateFieldRequest', - 'RenameTagTemplateFieldRequest', - 'RenameTagTemplateFieldEnumValueRequest', - 'DeleteTagTemplateFieldRequest', - 'ListTagsRequest', - 'ListTagsResponse', - 'ReconcileTagsRequest', - 'ReconcileTagsResponse', - 'ReconcileTagsMetadata', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'StarEntryRequest', - 'StarEntryResponse', - 'UnstarEntryRequest', - 'UnstarEntryResponse', - 'ImportEntriesRequest', - 'ImportEntriesResponse', - 'ImportEntriesMetadata', - 'ModifyEntryOverviewRequest', - 'ModifyEntryContactsRequest', - }, -) - - -class EntryType(proto.Enum): - r"""Metadata automatically ingested from Google Cloud resources like - BigQuery tables or Pub/Sub topics always uses enum values from - ``EntryType`` as the type of entry. - - Other sources of metadata like Hive or Oracle databases can identify - the type by either using one of the enum values from ``EntryType`` - (for example, ``FILESET`` for a Cloud Storage fileset) or specifying - a custom value using the ```Entry`` <#resource:-entry>`__ field - ``user_specified_type``. For more information, see `Surface files - from Cloud Storage with fileset - entries `__ or `Create custom - entries for your data - sources `__. - - Values: - ENTRY_TYPE_UNSPECIFIED (0): - Default unknown type. - TABLE (2): - The entry type that has a GoogleSQL schema, - including logical views. - MODEL (5): - The type of models. - - For more information, see `Supported models in BigQuery - ML `__. - DATA_STREAM (3): - An entry type for streaming entries. 
For - example, a Pub/Sub topic. - FILESET (4): - An entry type for a set of files or objects. - For example, a Cloud Storage fileset. - CLUSTER (6): - A group of servers that work together. For - example, a Kafka cluster. - DATABASE (7): - A database. - DATA_SOURCE_CONNECTION (8): - Connection to a data source. For example, a - BigQuery connection. - ROUTINE (9): - Routine, for example, a BigQuery routine. - LAKE (10): - A Dataplex lake. - ZONE (11): - A Dataplex zone. - SERVICE (14): - A service, for example, a Dataproc Metastore - service. - DATABASE_SCHEMA (15): - Schema within a relational database. - DASHBOARD (16): - A Dashboard, for example from Looker. - EXPLORE (17): - A Looker Explore. - - For more information, see [Looker Explore API] - (https://developers.looker.com/api/explorer/4.0/methods/LookmlModel/lookml_model_explore). - LOOK (18): - A Looker Look. - - For more information, see [Looker Look API] - (https://developers.looker.com/api/explorer/4.0/methods/Look). - """ - ENTRY_TYPE_UNSPECIFIED = 0 - TABLE = 2 - MODEL = 5 - DATA_STREAM = 3 - FILESET = 4 - CLUSTER = 6 - DATABASE = 7 - DATA_SOURCE_CONNECTION = 8 - ROUTINE = 9 - LAKE = 10 - ZONE = 11 - SERVICE = 14 - DATABASE_SCHEMA = 15 - DASHBOARD = 16 - EXPLORE = 17 - LOOK = 18 - - -class SearchCatalogRequest(proto.Message): - r"""Request message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Attributes: - scope (google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope): - Required. The scope of this search request. - - The ``scope`` is invalid if ``include_org_ids``, - ``include_project_ids`` are empty AND - ``include_gcp_public_datasets`` is set to ``false``. In this - case, the request returns an error. - query (str): - Optional. The query string with a minimum of 3 characters - and specific syntax. For more information, see `Data Catalog - search - syntax `__. - - An empty query string returns all data assets (in the - specified scope) that you have access to. - - A query string can be a simple ``xyz`` or qualified by - predicates: - - - ``name:x`` - - ``column:y`` - - ``description:z`` - page_size (int): - Upper bound on the number of results you can - get in a single response. - Can't be negative or 0, defaults to 10 in this - case. The maximum number is 1000. If exceeded, - throws an "invalid argument" exception. - page_token (str): - Optional. Pagination token that, if specified, returns the - next page of search results. If empty, returns the first - page. - - This token is returned in the - [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1.SearchCatalogResponse.next_page_token] - field of the response to a previous - [SearchCatalogRequest][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog] - call. - order_by (str): - Specifies the order of results. - - Currently supported case-sensitive values are: - - - ``relevance`` that can only be descending - - ``last_modified_timestamp [asc|desc]`` with descending - (``desc``) as default - - ``default`` that can only be descending - - Search queries don't guarantee full recall. Results that - match your query might not be returned, even in subsequent - result pages. Additionally, returned (and not returned) - results can vary if you repeat search queries. If you are - experiencing recall issues and you don't have to fetch the - results in any specific order, consider setting this - parameter to ``default``. - - If this parameter is omitted, it defaults to the descending - ``relevance``. 
- admin_search (bool): - Optional. If set, use searchAll permission granted on - organizations from ``include_org_ids`` and projects from - ``include_project_ids`` instead of the fine grained per - resource permissions when filtering the search results. The - only allowed ``order_by`` criteria for admin_search mode is - ``default``. Using this flags guarantees a full recall of - the search results. - """ - - class Scope(proto.Message): - r"""The criteria that select the subspace used for query - matching. - - Attributes: - include_org_ids (MutableSequence[str]): - The list of organization IDs to search within. - - To find your organization ID, follow the steps from - [Creating and managing organizations] - (/resource-manager/docs/creating-managing-organization). - include_project_ids (MutableSequence[str]): - The list of project IDs to search within. - - For more information on the distinction between project - names, IDs, and numbers, see - `Projects `__. - include_gcp_public_datasets (bool): - If ``true``, include Google Cloud public datasets in search - results. By default, they are excluded. - - See `Google Cloud Public Datasets `__ for - more information. - restricted_locations (MutableSequence[str]): - Optional. The list of locations to search within. If empty, - all locations are searched. - - Returns an error if any location in the list isn't one of - the `Supported - regions `__. - - If a location is unreachable, its name is returned in the - ``SearchCatalogResponse.unreachable`` field. To get - additional information on the error, repeat the search - request and set the location name as the value of this - parameter. - starred_only (bool): - Optional. If ``true``, search only among starred entries. - - By default, all results are returned, starred or not. - include_public_tag_templates (bool): - Optional. This field is deprecated. The - search mechanism for public and private tag - templates is the same. - """ - - include_org_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - include_project_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - include_gcp_public_datasets: bool = proto.Field( - proto.BOOL, - number=7, - ) - restricted_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - starred_only: bool = proto.Field( - proto.BOOL, - number=18, - ) - include_public_tag_templates: bool = proto.Field( - proto.BOOL, - number=19, - ) - - scope: Scope = proto.Field( - proto.MESSAGE, - number=6, - message=Scope, - ) - query: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - admin_search: bool = proto.Field( - proto.BOOL, - number=17, - ) - - -class SearchCatalogResponse(proto.Message): - r"""Response message for - [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. - - Attributes: - results (MutableSequence[google.cloud.datacatalog_v1.types.SearchCatalogResult]): - Search results. - total_size (int): - The approximate total number of entries - matched by the query. - next_page_token (str): - Pagination token that can be used in - subsequent calls to retrieve the next page of - results. - unreachable (MutableSequence[str]): - Unreachable locations. Search results don't include data - from those locations. 
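A rough sketch of the ``scope``/``query``/``order_by`` semantics described above for ``SearchCatalogRequest``, assuming the standard ``DataCatalogClient`` surface generated alongside these types; the project ID and query string are placeholders:

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()
    request = datacatalog_v1.SearchCatalogRequest(
        scope=datacatalog_v1.SearchCatalogRequest.Scope(
            include_project_ids=["my-project"],   # placeholder project
            include_gcp_public_datasets=False,
        ),
        query="column:email",   # qualified predicate, per the syntax above
        order_by="default",     # avoids recall gaps when order doesn't matter
        page_size=10,
    )
    # search_catalog returns a pager that lazily fetches further pages.
    for result in client.search_catalog(request=request):
        print(result.relative_resource_name)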
- - To get additional information on an error, repeat the search - request and restrict it to specific locations by setting the - ``SearchCatalogRequest.scope.restricted_locations`` - parameter. - """ - - @property - def raw_page(self): - return self - - results: MutableSequence[search.SearchCatalogResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=search.SearchCatalogResult, - ) - total_size: int = proto.Field( - proto.INT32, - number=2, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class CreateEntryGroupRequest(proto.Message): - r"""Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup]. - - Attributes: - parent (str): - Required. The names of the project and - location that the new entry group belongs to. - - Note: The entry group itself and its child - resources might not be stored in the location - specified in its name. - entry_group_id (str): - Required. The ID of the entry group to create. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and must start with a letter or underscore. - The maximum size is 64 bytes when encoded in UTF-8. - entry_group (google.cloud.datacatalog_v1.types.EntryGroup): - The entry group to create. Defaults to empty. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_group_id: str = proto.Field( - proto.STRING, - number=3, - ) - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=2, - message='EntryGroup', - ) - - -class UpdateEntryGroupRequest(proto.Message): - r"""Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup]. - - Attributes: - entry_group (google.cloud.datacatalog_v1.types.EntryGroup): - Required. Updates for the entry group. The ``name`` field - must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on - an entry group. - If this parameter is absent or empty, all - modifiable fields are overwritten. If such - fields are non-required and omitted in the - request body, their values are emptied. - """ - - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetEntryGroupRequest(proto.Message): - r"""Request message for - [GetEntryGroup][google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup]. - - Attributes: - name (str): - Required. The name of the entry group to get. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return. If empty or omitted, - all fields are returned. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteEntryGroupRequest(proto.Message): - r"""Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup]. - - Attributes: - name (str): - Required. The name of the entry group to - delete. - force (bool): - Optional. If true, deletes all entries in the - entry group. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListEntryGroupsRequest(proto.Message): - r"""Request message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - - Attributes: - parent (str): - Required. The name of the location that - contains the entry groups to list. - Can be provided as a URL. - page_size (int): - Optional. The maximum number of items to return. - - Default is 10. Maximum limit is 1000. Throws an invalid - argument if ``page_size`` is greater than 1000. - page_token (str): - Optional. Pagination token that specifies the - next page to return. If empty, returns the first - page. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListEntryGroupsResponse(proto.Message): - r"""Response message for - [ListEntryGroups][google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups]. - - Attributes: - entry_groups (MutableSequence[google.cloud.datacatalog_v1.types.EntryGroup]): - Entry group details. - next_page_token (str): - Pagination token to specify in the next call - to retrieve the next page of results. Empty if - there are no more items. - """ - - @property - def raw_page(self): - return self - - entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateEntryRequest(proto.Message): - r"""Request message for - [CreateEntry][google.cloud.datacatalog.v1.DataCatalog.CreateEntry]. - - Attributes: - parent (str): - Required. The name of the entry group this - entry belongs to. - Note: The entry itself and its child resources - might not be stored in the location specified in - its name. - entry_id (str): - Required. The ID of the entry to create. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - and underscores (_). The maximum size is 64 bytes when - encoded in UTF-8. - entry (google.cloud.datacatalog_v1.types.Entry): - Required. The entry to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_id: str = proto.Field( - proto.STRING, - number=3, - ) - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=2, - message='Entry', - ) - - -class UpdateEntryRequest(proto.Message): - r"""Request message for - [UpdateEntry][google.cloud.datacatalog.v1.DataCatalog.UpdateEntry]. - - Attributes: - entry (google.cloud.datacatalog_v1.types.Entry): - Required. Updates for the entry. The ``name`` field must be - set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable fields - are overwritten. If such fields are non-required and omitted - in the request body, their values are emptied. - - You can modify only the fields listed below. 
- - For entries with type ``DATA_STREAM``: - - - ``schema`` - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteEntryRequest(proto.Message): - r"""Request message for - [DeleteEntry][google.cloud.datacatalog.v1.DataCatalog.DeleteEntry]. - - Attributes: - name (str): - Required. The name of the entry to delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetEntryRequest(proto.Message): - r"""Request message for - [GetEntry][google.cloud.datacatalog.v1.DataCatalog.GetEntry]. - - Attributes: - name (str): - Required. The name of the entry to get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LookupEntryRequest(proto.Message): - r"""Request message for - [LookupEntry][google.cloud.datacatalog.v1.DataCatalog.LookupEntry]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - linked_resource (str): - The full name of the Google Cloud Platform resource the Data - Catalog entry represents. For more information, see [Full - Resource Name] - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - - Full names are case-sensitive. For example: - - - ``//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` - - ``//pubsub.googleapis.com/projects/{PROJECT_ID}/topics/{TOPIC_ID}`` - - This field is a member of `oneof`_ ``target_name``. - sql_resource (str): - The SQL name of the entry. SQL names are case-sensitive. - - Examples: - - - ``pubsub.topic.{PROJECT_ID}.{TOPIC_ID}`` - - ``pubsub.topic.{PROJECT_ID}.``\ \`\ ``{TOPIC.ID.SEPARATED.WITH.DOTS}``\ \` - - ``bigquery.table.{PROJECT_ID}.{DATASET_ID}.{TABLE_ID}`` - - ``bigquery.dataset.{PROJECT_ID}.{DATASET_ID}`` - - ``datacatalog.entry.{PROJECT_ID}.{LOCATION_ID}.{ENTRY_GROUP_ID}.{ENTRY_ID}`` - - Identifiers (``*_ID``) should comply with the [Lexical - structure in Standard SQL] - (https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical). - - This field is a member of `oneof`_ ``target_name``. - fully_qualified_name (str): - `Fully Qualified Name - (FQN) `__ - of the resource. - - FQNs take two forms: - - - For non-regionalized resources: - - ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - - For regionalized resources: - - ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - Example for a DPMS table: - - ``dataproc_metastore:{PROJECT_ID}.{LOCATION_ID}.{INSTANCE_ID}.{DATABASE_ID}.{TABLE_ID}`` - - This field is a member of `oneof`_ ``target_name``. - project (str): - Project where the lookup should be performed. Required to - lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` - ``integrated_system`` using its ``fully_qualified_name``. 
- Ignored in other cases. - location (str): - Location where the lookup should be performed. Required to - lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` - ``integrated_system`` using its ``fully_qualified_name``. - Ignored in other cases. - """ - - linked_resource: str = proto.Field( - proto.STRING, - number=1, - oneof='target_name', - ) - sql_resource: str = proto.Field( - proto.STRING, - number=3, - oneof='target_name', - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=5, - oneof='target_name', - ) - project: str = proto.Field( - proto.STRING, - number=6, - ) - location: str = proto.Field( - proto.STRING, - number=7, - ) - - -class Entry(proto.Message): - r"""Entry metadata. A Data Catalog entry represents another resource in - Google Cloud Platform (such as a BigQuery dataset or a Pub/Sub - topic) or outside of it. You can use the ``linked_resource`` field - in the entry resource to refer to the original resource ID of the - source system. - - An entry resource contains resource details, for example, its - schema. Additionally, you can attach flexible metadata to an entry - in the form of a [Tag][google.cloud.datacatalog.v1.Tag]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The resource name of an entry in - URL format. - Note: The entry itself and its child resources - might not be stored in the location specified in - its name. - linked_resource (str): - The resource this metadata entry refers to. - - For Google Cloud Platform resources, ``linked_resource`` is - the [Full Resource Name] - (https://cloud.google.com/apis/design/resource_names#full_resource_name). - For example, the ``linked_resource`` for a table resource - from BigQuery is: - - ``//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}`` - - Output only when the entry is one of the types in the - ``EntryType`` enum. - - For entries with a ``user_specified_type``, this field is - optional and defaults to an empty string. - - The resource string must contain only letters (a-z, A-Z), - numbers (0-9), underscores (_), periods (.), colons (:), - slashes (/), dashes (-), and hashes (#). The maximum size is - 200 bytes when encoded in UTF-8. - fully_qualified_name (str): - `Fully Qualified Name - (FQN) `__ - of the resource. Set automatically for entries representing - resources from synced systems. Settable only during - creation, and read-only later. Can be used for search and - lookup of the entries. - type_ (google.cloud.datacatalog_v1.types.EntryType): - The type of the entry. - - For details, see ```EntryType`` <#entrytype>`__. - - This field is a member of `oneof`_ ``entry_type``. - user_specified_type (str): - Custom entry type that doesn't match any of the values - allowed for input and listed in the ``EntryType`` enum. - - When creating an entry, first check the type values in the - enum. If there are no appropriate types for the new entry, - provide a custom value, for example, ``my_special_type``. - - The ``user_specified_type`` string has the following - limitations: - - - Is case insensitive. - - Must begin with a letter or underscore. - - Can only contain letters, numbers, and underscores. 
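A minimal lookup sketch tying the ``target_name`` oneof above to a client call; the BigQuery project, dataset, and table IDs are placeholders:

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()
    # Look up by the full resource name of a BigQuery table.
    entry = client.lookup_entry(
        request=datacatalog_v1.LookupEntryRequest(
            linked_resource=(
                "//bigquery.googleapis.com/projects/my-project"
                "/datasets/my_dataset/tables/my_table"
            ),
        )
    )
    print(entry.name, entry.integrated_system)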
- - Must be at least 1 character and at most 64 characters - long. - - This field is a member of `oneof`_ ``entry_type``. - integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem): - Output only. Indicates the entry's source - system that Data Catalog integrates with, such - as BigQuery, Pub/Sub, or Dataproc Metastore. - - This field is a member of `oneof`_ ``system``. - user_specified_system (str): - Indicates the entry's source system that Data Catalog - doesn't automatically integrate with. - - The ``user_specified_system`` string has the following - limitations: - - - Is case insensitive. - - Must begin with a letter or underscore. - - Can only contain letters, numbers, and underscores. - - Must be at least 1 character and at most 64 characters - long. - - This field is a member of `oneof`_ ``system``. - sql_database_system_spec (google.cloud.datacatalog_v1.types.SqlDatabaseSystemSpec): - Specification that applies to a relational database system. - Only settable when ``user_specified_system`` is equal to - ``SQL_DATABASE`` - - This field is a member of `oneof`_ ``system_spec``. - looker_system_spec (google.cloud.datacatalog_v1.types.LookerSystemSpec): - Specification that applies to Looker sysstem. Only settable - when ``user_specified_system`` is equal to ``LOOKER`` - - This field is a member of `oneof`_ ``system_spec``. - cloud_bigtable_system_spec (google.cloud.datacatalog_v1.types.CloudBigtableSystemSpec): - Specification that applies to Cloud Bigtable system. Only - settable when ``integrated_system`` is equal to - ``CLOUD_BIGTABLE`` - - This field is a member of `oneof`_ ``system_spec``. - gcs_fileset_spec (google.cloud.datacatalog_v1.types.GcsFilesetSpec): - Specification that applies to a Cloud Storage fileset. Valid - only for entries with the ``FILESET`` type. - - This field is a member of `oneof`_ ``type_spec``. - bigquery_table_spec (google.cloud.datacatalog_v1.types.BigQueryTableSpec): - Output only. Specification that applies to a BigQuery table. - Valid only for entries with the ``TABLE`` type. - - This field is a member of `oneof`_ ``type_spec``. - bigquery_date_sharded_spec (google.cloud.datacatalog_v1.types.BigQueryDateShardedSpec): - Output only. Specification for a group of BigQuery tables - with the ``[prefix]YYYYMMDD`` name pattern. - - For more information, see [Introduction to partitioned - tables] - (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). - - This field is a member of `oneof`_ ``type_spec``. - database_table_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec): - Specification that applies to a table resource. Valid only - for entries with the ``TABLE`` or ``EXPLORE`` type. - - This field is a member of `oneof`_ ``spec``. - data_source_connection_spec (google.cloud.datacatalog_v1.types.DataSourceConnectionSpec): - Specification that applies to a data source connection. - Valid only for entries with the ``DATA_SOURCE_CONNECTION`` - type. - - This field is a member of `oneof`_ ``spec``. - routine_spec (google.cloud.datacatalog_v1.types.RoutineSpec): - Specification that applies to a user-defined function or - procedure. Valid only for entries with the ``ROUTINE`` type. - - This field is a member of `oneof`_ ``spec``. - dataset_spec (google.cloud.datacatalog_v1.types.DatasetSpec): - Specification that applies to a dataset. - - This field is a member of `oneof`_ ``spec``. - fileset_spec (google.cloud.datacatalog_v1.types.FilesetSpec): - Specification that applies to a fileset resource. 
Valid only - for entries with the ``FILESET`` type. - - This field is a member of `oneof`_ ``spec``. - service_spec (google.cloud.datacatalog_v1.types.ServiceSpec): - Specification that applies to a Service - resource. - - This field is a member of `oneof`_ ``spec``. - model_spec (google.cloud.datacatalog_v1.types.ModelSpec): - Model specification. - - This field is a member of `oneof`_ ``spec``. - display_name (str): - Display name of an entry. - - The maximum size is 500 bytes when encoded in - UTF-8. Default value is an empty string. - description (str): - Entry description that can consist of several - sentences or paragraphs that describe entry - contents. - - The description must not contain Unicode - non-characters as well as C0 and C1 control - codes except tabs (HT), new lines (LF), carriage - returns (CR), and page breaks (FF). - The maximum size is 2000 bytes when encoded in - UTF-8. Default value is an empty string. - business_context (google.cloud.datacatalog_v1.types.BusinessContext): - Business Context of the entry. Not supported - for BigQuery datasets - schema (google.cloud.datacatalog_v1.types.Schema): - Schema of the entry. An entry might not have - any schema attached to it. - source_system_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): - Timestamps from the underlying resource, not from the Data - Catalog entry. - - Output only when the entry has a system listed in the - ``IntegratedSystem`` enum. For entries with - ``user_specified_system``, this field is optional and - defaults to an empty timestamp. - usage_signal (google.cloud.datacatalog_v1.types.UsageSignal): - Resource usage statistics. - labels (MutableMapping[str, str]): - Cloud labels attached to the entry. - - In Data Catalog, you can create and modify - labels attached only to custom entries. Synced - entries have unmodifiable labels that come from - the source system. - data_source (google.cloud.datacatalog_v1.types.DataSource): - Output only. Physical location of the entry. - personal_details (google.cloud.datacatalog_v1.types.PersonalDetails): - Output only. Additional information related - to the entry. Private to the current user. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - linked_resource: str = proto.Field( - proto.STRING, - number=9, - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=29, - ) - type_: 'EntryType' = proto.Field( - proto.ENUM, - number=2, - oneof='entry_type', - enum='EntryType', - ) - user_specified_type: str = proto.Field( - proto.STRING, - number=16, - oneof='entry_type', - ) - integrated_system: common.IntegratedSystem = proto.Field( - proto.ENUM, - number=17, - oneof='system', - enum=common.IntegratedSystem, - ) - user_specified_system: str = proto.Field( - proto.STRING, - number=18, - oneof='system', - ) - sql_database_system_spec: 'SqlDatabaseSystemSpec' = proto.Field( - proto.MESSAGE, - number=39, - oneof='system_spec', - message='SqlDatabaseSystemSpec', - ) - looker_system_spec: 'LookerSystemSpec' = proto.Field( - proto.MESSAGE, - number=40, - oneof='system_spec', - message='LookerSystemSpec', - ) - cloud_bigtable_system_spec: 'CloudBigtableSystemSpec' = proto.Field( - proto.MESSAGE, - number=41, - oneof='system_spec', - message='CloudBigtableSystemSpec', - ) - gcs_fileset_spec: gcd_gcs_fileset_spec.GcsFilesetSpec = proto.Field( - proto.MESSAGE, - number=6, - oneof='type_spec', - message=gcd_gcs_fileset_spec.GcsFilesetSpec, - ) - bigquery_table_spec: table_spec.BigQueryTableSpec = proto.Field( - proto.MESSAGE, - number=12, - oneof='type_spec', - message=table_spec.BigQueryTableSpec, - ) - bigquery_date_sharded_spec: table_spec.BigQueryDateShardedSpec = proto.Field( - proto.MESSAGE, - number=15, - oneof='type_spec', - message=table_spec.BigQueryDateShardedSpec, - ) - database_table_spec: 'DatabaseTableSpec' = proto.Field( - proto.MESSAGE, - number=24, - oneof='spec', - message='DatabaseTableSpec', - ) - data_source_connection_spec: 'DataSourceConnectionSpec' = proto.Field( - proto.MESSAGE, - number=27, - oneof='spec', - message='DataSourceConnectionSpec', - ) - routine_spec: 'RoutineSpec' = proto.Field( - proto.MESSAGE, - number=28, - oneof='spec', - message='RoutineSpec', - ) - dataset_spec: 'DatasetSpec' = proto.Field( - proto.MESSAGE, - number=32, - oneof='spec', - message='DatasetSpec', - ) - fileset_spec: 'FilesetSpec' = proto.Field( - proto.MESSAGE, - number=33, - oneof='spec', - message='FilesetSpec', - ) - service_spec: 'ServiceSpec' = proto.Field( - proto.MESSAGE, - number=42, - oneof='spec', - message='ServiceSpec', - ) - model_spec: 'ModelSpec' = proto.Field( - proto.MESSAGE, - number=43, - oneof='spec', - message='ModelSpec', - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - business_context: 'BusinessContext' = proto.Field( - proto.MESSAGE, - number=37, - message='BusinessContext', - ) - schema: gcd_schema.Schema = proto.Field( - proto.MESSAGE, - number=5, - message=gcd_schema.Schema, - ) - source_system_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=7, - message=timestamps.SystemTimestamps, - ) - usage_signal: usage.UsageSignal = proto.Field( - proto.MESSAGE, - number=13, - message=usage.UsageSignal, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=14, - ) - data_source: gcd_data_source.DataSource = proto.Field( - proto.MESSAGE, - number=20, - message=gcd_data_source.DataSource, - ) - personal_details: common.PersonalDetails = proto.Field( - proto.MESSAGE, - number=26, - message=common.PersonalDetails, - ) - - -class DatabaseTableSpec(proto.Message): - 
r"""Specification that applies to a table resource. Valid only for - entries with the ``TABLE`` type. - - Attributes: - type_ (google.cloud.datacatalog_v1.types.DatabaseTableSpec.TableType): - Type of this table. - dataplex_table (google.cloud.datacatalog_v1.types.DataplexTableSpec): - Output only. Fields specific to a Dataplex - table and present only in the Dataplex table - entries. - database_view_spec (google.cloud.datacatalog_v1.types.DatabaseTableSpec.DatabaseViewSpec): - Spec what aplies to tables that are actually - views. Not set for "real" tables. - """ - class TableType(proto.Enum): - r"""Type of the table. - - Values: - TABLE_TYPE_UNSPECIFIED (0): - Default unknown table type. - NATIVE (1): - Native table. - EXTERNAL (2): - External table. - """ - TABLE_TYPE_UNSPECIFIED = 0 - NATIVE = 1 - EXTERNAL = 2 - - class DatabaseViewSpec(proto.Message): - r"""Specification that applies to database view. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - view_type (google.cloud.datacatalog_v1.types.DatabaseTableSpec.DatabaseViewSpec.ViewType): - Type of this view. - base_table (str): - Name of a singular table this view reflects - one to one. - - This field is a member of `oneof`_ ``source_definition``. - sql_query (str): - SQL query used to generate this view. - - This field is a member of `oneof`_ ``source_definition``. - """ - class ViewType(proto.Enum): - r"""Concrete type of the view. - - Values: - VIEW_TYPE_UNSPECIFIED (0): - Default unknown view type. - STANDARD_VIEW (1): - Standard view. - MATERIALIZED_VIEW (2): - Materialized view. - """ - VIEW_TYPE_UNSPECIFIED = 0 - STANDARD_VIEW = 1 - MATERIALIZED_VIEW = 2 - - view_type: 'DatabaseTableSpec.DatabaseViewSpec.ViewType' = proto.Field( - proto.ENUM, - number=1, - enum='DatabaseTableSpec.DatabaseViewSpec.ViewType', - ) - base_table: str = proto.Field( - proto.STRING, - number=2, - oneof='source_definition', - ) - sql_query: str = proto.Field( - proto.STRING, - number=3, - oneof='source_definition', - ) - - type_: TableType = proto.Field( - proto.ENUM, - number=1, - enum=TableType, - ) - dataplex_table: dataplex_spec.DataplexTableSpec = proto.Field( - proto.MESSAGE, - number=2, - message=dataplex_spec.DataplexTableSpec, - ) - database_view_spec: DatabaseViewSpec = proto.Field( - proto.MESSAGE, - number=3, - message=DatabaseViewSpec, - ) - - -class FilesetSpec(proto.Message): - r"""Specification that applies to a fileset. Valid only for - entries with the 'FILESET' type. - - Attributes: - dataplex_fileset (google.cloud.datacatalog_v1.types.DataplexFilesetSpec): - Fields specific to a Dataplex fileset and - present only in the Dataplex fileset entries. - """ - - dataplex_fileset: dataplex_spec.DataplexFilesetSpec = proto.Field( - proto.MESSAGE, - number=1, - message=dataplex_spec.DataplexFilesetSpec, - ) - - -class DataSourceConnectionSpec(proto.Message): - r"""Specification that applies to a data source connection. Valid only - for entries with the ``DATA_SOURCE_CONNECTION`` type. Only one of - internal specs can be set at the time, and cannot be changed later. - - Attributes: - bigquery_connection_spec (google.cloud.datacatalog_v1.types.BigQueryConnectionSpec): - Output only. Fields specific to BigQuery - connections. 
- """ - - bigquery_connection_spec: bigquery.BigQueryConnectionSpec = proto.Field( - proto.MESSAGE, - number=1, - message=bigquery.BigQueryConnectionSpec, - ) - - -class RoutineSpec(proto.Message): - r"""Specification that applies to a routine. Valid only for entries with - the ``ROUTINE`` type. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - routine_type (google.cloud.datacatalog_v1.types.RoutineSpec.RoutineType): - The type of the routine. - language (str): - The language the routine is written in. The exact value - depends on the source system. For BigQuery routines, - possible values are: - - - ``SQL`` - - ``JAVASCRIPT`` - routine_arguments (MutableSequence[google.cloud.datacatalog_v1.types.RoutineSpec.Argument]): - Arguments of the routine. - return_type (str): - Return type of the argument. The exact value - depends on the source system and the language. - definition_body (str): - The body of the routine. - bigquery_routine_spec (google.cloud.datacatalog_v1.types.BigQueryRoutineSpec): - Fields specific for BigQuery routines. - - This field is a member of `oneof`_ ``system_spec``. - """ - class RoutineType(proto.Enum): - r"""The fine-grained type of the routine. - - Values: - ROUTINE_TYPE_UNSPECIFIED (0): - Unspecified type. - SCALAR_FUNCTION (1): - Non-builtin permanent scalar function. - PROCEDURE (2): - Stored procedure. - """ - ROUTINE_TYPE_UNSPECIFIED = 0 - SCALAR_FUNCTION = 1 - PROCEDURE = 2 - - class Argument(proto.Message): - r"""Input or output argument of a function or stored procedure. - - Attributes: - name (str): - The name of the argument. A return argument - of a function might not have a name. - mode (google.cloud.datacatalog_v1.types.RoutineSpec.Argument.Mode): - Specifies whether the argument is input or - output. - type_ (str): - Type of the argument. The exact value depends - on the source system and the language. - """ - class Mode(proto.Enum): - r"""The input or output mode of the argument. - - Values: - MODE_UNSPECIFIED (0): - Unspecified mode. - IN (1): - The argument is input-only. - OUT (2): - The argument is output-only. - INOUT (3): - The argument is both an input and an output. - """ - MODE_UNSPECIFIED = 0 - IN = 1 - OUT = 2 - INOUT = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - mode: 'RoutineSpec.Argument.Mode' = proto.Field( - proto.ENUM, - number=2, - enum='RoutineSpec.Argument.Mode', - ) - type_: str = proto.Field( - proto.STRING, - number=3, - ) - - routine_type: RoutineType = proto.Field( - proto.ENUM, - number=1, - enum=RoutineType, - ) - language: str = proto.Field( - proto.STRING, - number=2, - ) - routine_arguments: MutableSequence[Argument] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Argument, - ) - return_type: str = proto.Field( - proto.STRING, - number=4, - ) - definition_body: str = proto.Field( - proto.STRING, - number=5, - ) - bigquery_routine_spec: bigquery.BigQueryRoutineSpec = proto.Field( - proto.MESSAGE, - number=6, - oneof='system_spec', - message=bigquery.BigQueryRoutineSpec, - ) - - -class DatasetSpec(proto.Message): - r"""Specification that applies to a dataset. Valid only for entries with - the ``DATASET`` type. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - vertex_dataset_spec (google.cloud.datacatalog_v1.types.VertexDatasetSpec): - Vertex AI Dataset specific fields - - This field is a member of `oneof`_ ``system_spec``. 
- """ - - vertex_dataset_spec: 'VertexDatasetSpec' = proto.Field( - proto.MESSAGE, - number=2, - oneof='system_spec', - message='VertexDatasetSpec', - ) - - -class SqlDatabaseSystemSpec(proto.Message): - r"""Specification that applies to entries that are part ``SQL_DATABASE`` - system (user_specified_type) - - Attributes: - sql_engine (str): - SQL Database Engine. enum SqlEngine { UNDEFINED = 0; MY_SQL - = 1; POSTGRE_SQL = 2; SQL_SERVER = 3; } Engine of the - enclosing database instance. - database_version (str): - Version of the database engine. - instance_host (str): - Host of the SQL database enum InstanceHost { UNDEFINED = 0; - SELF_HOSTED = 1; CLOUD_SQL = 2; AMAZON_RDS = 3; AZURE_SQL = - 4; } Host of the enclousing database instance. - """ - - sql_engine: str = proto.Field( - proto.STRING, - number=1, - ) - database_version: str = proto.Field( - proto.STRING, - number=2, - ) - instance_host: str = proto.Field( - proto.STRING, - number=3, - ) - - -class LookerSystemSpec(proto.Message): - r"""Specification that applies to entries that are part ``LOOKER`` - system (user_specified_type) - - Attributes: - parent_instance_id (str): - ID of the parent Looker Instance. Empty if it does not - exist. Example value: ``someinstance.looker.com`` - parent_instance_display_name (str): - Name of the parent Looker Instance. Empty if - it does not exist. - parent_model_id (str): - ID of the parent Model. Empty if it does not - exist. - parent_model_display_name (str): - Name of the parent Model. Empty if it does - not exist. - parent_view_id (str): - ID of the parent View. Empty if it does not - exist. - parent_view_display_name (str): - Name of the parent View. Empty if it does not - exist. - """ - - parent_instance_id: str = proto.Field( - proto.STRING, - number=1, - ) - parent_instance_display_name: str = proto.Field( - proto.STRING, - number=2, - ) - parent_model_id: str = proto.Field( - proto.STRING, - number=3, - ) - parent_model_display_name: str = proto.Field( - proto.STRING, - number=4, - ) - parent_view_id: str = proto.Field( - proto.STRING, - number=5, - ) - parent_view_display_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -class CloudBigtableSystemSpec(proto.Message): - r"""Specification that applies to all entries that are part of - ``CLOUD_BIGTABLE`` system (user_specified_type) - - Attributes: - instance_display_name (str): - Display name of the Instance. This is user - specified and different from the resource name. - """ - - instance_display_name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudBigtableInstanceSpec(proto.Message): - r"""Specification that applies to Instance entries that are part of - ``CLOUD_BIGTABLE`` system. (user_specified_type) - - Attributes: - cloud_bigtable_cluster_specs (MutableSequence[google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec.CloudBigtableClusterSpec]): - The list of clusters for the Instance. - """ - - class CloudBigtableClusterSpec(proto.Message): - r"""Spec that applies to clusters of an Instance of Cloud - Bigtable. - - Attributes: - display_name (str): - Name of the cluster. - location (str): - Location of the cluster, typically a Cloud - zone. - type_ (str): - Type of the resource. For a cluster this - would be "CLUSTER". - linked_resource (str): - A link back to the parent resource, in this - case Instance. 
- """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=3, - ) - linked_resource: str = proto.Field( - proto.STRING, - number=4, - ) - - cloud_bigtable_cluster_specs: MutableSequence[CloudBigtableClusterSpec] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=CloudBigtableClusterSpec, - ) - - -class ServiceSpec(proto.Message): - r"""Specification that applies to a Service resource. Valid only for - entries with the ``SERVICE`` type. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cloud_bigtable_instance_spec (google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec): - Specification that applies to Instance entries of - ``CLOUD_BIGTABLE`` system. - - This field is a member of `oneof`_ ``system_spec``. - """ - - cloud_bigtable_instance_spec: 'CloudBigtableInstanceSpec' = proto.Field( - proto.MESSAGE, - number=1, - oneof='system_spec', - message='CloudBigtableInstanceSpec', - ) - - -class VertexModelSourceInfo(proto.Message): - r"""Detail description of the source information of a Vertex - model. - - Attributes: - source_type (google.cloud.datacatalog_v1.types.VertexModelSourceInfo.ModelSourceType): - Type of the model source. - copy (bool): - If this Model is copy of another Model. If true then - [source_type][google.cloud.datacatalog.v1.VertexModelSourceInfo.source_type] - pertains to the original. - """ - class ModelSourceType(proto.Enum): - r"""Source of the model. - - Values: - MODEL_SOURCE_TYPE_UNSPECIFIED (0): - Should not be used. - AUTOML (1): - The Model is uploaded by automl training - pipeline. - CUSTOM (2): - The Model is uploaded by user or custom - training pipeline. - BQML (3): - The Model is registered and sync'ed from - BigQuery ML. - MODEL_GARDEN (4): - The Model is saved or tuned from Model - Garden. - """ - MODEL_SOURCE_TYPE_UNSPECIFIED = 0 - AUTOML = 1 - CUSTOM = 2 - BQML = 3 - MODEL_GARDEN = 4 - - source_type: ModelSourceType = proto.Field( - proto.ENUM, - number=1, - enum=ModelSourceType, - ) - copy: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class VertexModelSpec(proto.Message): - r"""Specification for vertex model resources. - - Attributes: - version_id (str): - The version ID of the model. - version_aliases (MutableSequence[str]): - User provided version aliases so that a model - version can be referenced via alias - version_description (str): - The description of this version. - vertex_model_source_info (google.cloud.datacatalog_v1.types.VertexModelSourceInfo): - Source of a Vertex model. - container_image_uri (str): - URI of the Docker image to be used as the - custom container for serving predictions. - """ - - version_id: str = proto.Field( - proto.STRING, - number=1, - ) - version_aliases: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - version_description: str = proto.Field( - proto.STRING, - number=3, - ) - vertex_model_source_info: 'VertexModelSourceInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='VertexModelSourceInfo', - ) - container_image_uri: str = proto.Field( - proto.STRING, - number=5, - ) - - -class VertexDatasetSpec(proto.Message): - r"""Specification for vertex dataset resources. - - Attributes: - data_item_count (int): - The number of DataItems in this Dataset. Only - apply for non-structured Dataset. 
- data_type (google.cloud.datacatalog_v1.types.VertexDatasetSpec.DataType): - Type of the dataset. - """ - class DataType(proto.Enum): - r"""Type of data stored in the dataset. - - Values: - DATA_TYPE_UNSPECIFIED (0): - Should not be used. - TABLE (1): - Structured data dataset. - IMAGE (2): - Image dataset which supports - ImageClassification, ImageObjectDetection and - ImageSegmentation problems. - TEXT (3): - Document dataset which supports - TextClassification, TextExtraction and - TextSentiment problems. - VIDEO (4): - Video dataset which supports - VideoClassification, VideoObjectTracking and - VideoActionRecognition problems. - CONVERSATION (5): - Conversation dataset which supports - conversation problems. - TIME_SERIES (6): - TimeSeries dataset. - DOCUMENT (7): - Document dataset which supports - DocumentAnnotation problems. - TEXT_TO_SPEECH (8): - TextToSpeech dataset which supports - TextToSpeech problems. - TRANSLATION (9): - Translation dataset which supports - Translation problems. - STORE_VISION (10): - Store Vision dataset which is used for HITL - integration. - ENTERPRISE_KNOWLEDGE_GRAPH (11): - Enterprise Knowledge Graph dataset which is - used for HITL labeling integration. - TEXT_PROMPT (12): - Text prompt dataset which supports Large - Language Models. - """ - DATA_TYPE_UNSPECIFIED = 0 - TABLE = 1 - IMAGE = 2 - TEXT = 3 - VIDEO = 4 - CONVERSATION = 5 - TIME_SERIES = 6 - DOCUMENT = 7 - TEXT_TO_SPEECH = 8 - TRANSLATION = 9 - STORE_VISION = 10 - ENTERPRISE_KNOWLEDGE_GRAPH = 11 - TEXT_PROMPT = 12 - - data_item_count: int = proto.Field( - proto.INT64, - number=1, - ) - data_type: DataType = proto.Field( - proto.ENUM, - number=2, - enum=DataType, - ) - - -class ModelSpec(proto.Message): - r"""Specification that applies to a model. Valid only for entries with - the ``MODEL`` type. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - vertex_model_spec (google.cloud.datacatalog_v1.types.VertexModelSpec): - Specification for vertex model resources. - - This field is a member of `oneof`_ ``system_spec``. - """ - - vertex_model_spec: 'VertexModelSpec' = proto.Field( - proto.MESSAGE, - number=1, - oneof='system_spec', - message='VertexModelSpec', - ) - - -class BusinessContext(proto.Message): - r"""Business Context of the entry. - - Attributes: - entry_overview (google.cloud.datacatalog_v1.types.EntryOverview): - Entry overview fields for rich text - descriptions of entries. - contacts (google.cloud.datacatalog_v1.types.Contacts): - Contact people for the entry. - """ - - entry_overview: 'EntryOverview' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryOverview', - ) - contacts: 'Contacts' = proto.Field( - proto.MESSAGE, - number=2, - message='Contacts', - ) - - -class EntryOverview(proto.Message): - r"""Entry overview fields for rich text descriptions of entries. - - Attributes: - overview (str): - Entry overview with support for rich text. - - The overview must only contain Unicode - characters, and should be formatted using HTML. - The maximum length is 10 MiB as this value holds - HTML descriptions including encoded images. The - maximum length of the text without images is 100 - KiB. - """ - - overview: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Contacts(proto.Message): - r"""Contact people for the entry. - - Attributes: - people (MutableSequence[google.cloud.datacatalog_v1.types.Contacts.Person]): - The list of contact people for the entry. 
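
# A minimal usage sketch, assuming the published google-cloud-datacatalog client
# library; it shows how the Vertex AI model fields above surface on a catalog
# entry. The linked_resource value is a hypothetical placeholder.
from google.cloud import datacatalog_v1


def show_vertex_model(linked_resource: str) -> None:
    client = datacatalog_v1.DataCatalogClient()
    # Resolve the Data Catalog entry for a Vertex AI Model resource.
    entry = client.lookup_entry(
        request=datacatalog_v1.LookupEntryRequest(linked_resource=linked_resource)
    )
    vertex_spec = entry.model_spec.vertex_model_spec
    print("version:", vertex_spec.version_id)
    print("source type:", vertex_spec.vertex_model_source_info.source_type.name)
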
- """ - - class Person(proto.Message): - r"""A contact person for the entry. - - Attributes: - designation (str): - Designation of the person, for example, Data - Steward. - email (str): - Email of the person in the format of ``john.doe@xyz``, - ````, or ``John Doe``. - """ - - designation: str = proto.Field( - proto.STRING, - number=1, - ) - email: str = proto.Field( - proto.STRING, - number=2, - ) - - people: MutableSequence[Person] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Person, - ) - - -class EntryGroup(proto.Message): - r"""Entry group metadata. - - An ``EntryGroup`` resource represents a logical grouping of zero or - more Data Catalog [Entry][google.cloud.datacatalog.v1.Entry] - resources. - - Attributes: - name (str): - The resource name of the entry group in URL - format. - Note: The entry group itself and its child - resources might not be stored in the location - specified in its name. - display_name (str): - A short name to identify the entry group, for - example, "analytics data - jan 2011". Default - value is an empty string. - description (str): - Entry group description. Can consist of - several sentences or paragraphs that describe - the entry group contents. Default value is an - empty string. - data_catalog_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): - Output only. Timestamps of the entry group. - Default value is empty. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - data_catalog_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=4, - message=timestamps.SystemTimestamps, - ) - - -class CreateTagTemplateRequest(proto.Message): - r"""Request message for - [CreateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate]. - - Attributes: - parent (str): - Required. The name of the project and the template location - `region `__. - tag_template_id (str): - Required. The ID of the tag template to create. - - The ID must contain only lowercase letters (a-z), numbers - (0-9), or underscores (_), and must start with a letter or - underscore. The maximum size is 64 bytes when encoded in - UTF-8. - tag_template (google.cloud.datacatalog_v1.types.TagTemplate): - Required. The tag template to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_id: str = proto.Field( - proto.STRING, - number=3, - ) - tag_template: gcd_tags.TagTemplate = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.TagTemplate, - ) - - -class GetTagTemplateRequest(proto.Message): - r"""Request message for - [GetTagTemplate][google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate]. - - Attributes: - name (str): - Required. The name of the tag template to - get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTagTemplateRequest(proto.Message): - r"""Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate]. - - Attributes: - tag_template (google.cloud.datacatalog_v1.types.TagTemplate): - Required. The template to update. The ``name`` field must be - set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag template. - Currently, only ``display_name`` and - ``is_publicly_readable`` can be overwritten. - - If this parameter is absent or empty, all modifiable fields - are overwritten. 
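
# A hedged sketch, assuming the google-cloud-datacatalog client library: creating
# a tag template with the CreateTagTemplateRequest described above. The project,
# location, and template ID are hypothetical placeholders.
from google.cloud import datacatalog_v1


def create_example_template() -> datacatalog_v1.TagTemplate:
    client = datacatalog_v1.DataCatalogClient()
    template = datacatalog_v1.TagTemplate(
        display_name="Data source",
        fields={
            "source": datacatalog_v1.TagTemplateField(
                display_name="Source system",
                type_=datacatalog_v1.FieldType(
                    primitive_type=datacatalog_v1.FieldType.PrimitiveType.STRING
                ),
            )
        },
    )
    request = datacatalog_v1.CreateTagTemplateRequest(
        parent="projects/my-project/locations/us-central1",
        tag_template_id="data_source_template",
        tag_template=template,
    )
    return client.create_tag_template(request=request)
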
If such fields are non-required and omitted - in the request body, their values are emptied. - - Note: Updating the ``is_publicly_readable`` field may - require up to 12 hours to take effect in search results. - """ - - tag_template: gcd_tags.TagTemplate = proto.Field( - proto.MESSAGE, - number=1, - message=gcd_tags.TagTemplate, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteTagTemplateRequest(proto.Message): - r"""Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate]. - - Attributes: - name (str): - Required. The name of the tag template to - delete. - force (bool): - Required. If true, deletes all tags that use this template. - - Currently, ``true`` is the only supported value. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class CreateTagRequest(proto.Message): - r"""Request message for - [CreateTag][google.cloud.datacatalog.v1.DataCatalog.CreateTag]. - - Attributes: - parent (str): - Required. The name of the resource to attach - this tag to. - Tags can be attached to entries or entry groups. - An entry can have up to 1000 attached tags. - - Note: The tag and its child resources might not - be stored in the location specified in its name. - tag (google.cloud.datacatalog_v1.types.Tag): - Required. The tag to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag: gcd_tags.Tag = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.Tag, - ) - - -class UpdateTagRequest(proto.Message): - r"""Request message for - [UpdateTag][google.cloud.datacatalog.v1.DataCatalog.UpdateTag]. - - Attributes: - tag (google.cloud.datacatalog_v1.types.Tag): - Required. The updated tag. The "name" field - must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the name - ``fields``. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their values - are emptied. - """ - - tag: gcd_tags.Tag = proto.Field( - proto.MESSAGE, - number=1, - message=gcd_tags.Tag, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteTagRequest(proto.Message): - r"""Request message for - [DeleteTag][google.cloud.datacatalog.v1.DataCatalog.DeleteTag]. - - Attributes: - name (str): - Required. The name of the tag to delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateTagTemplateFieldRequest(proto.Message): - r"""Request message for - [CreateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField]. - - Attributes: - parent (str): - Required. The name of the project and the template location - `region `__. - tag_template_field_id (str): - Required. The ID of the tag template field to create. - - Note: Adding a required field to an existing template is - *not* allowed. - - Field IDs can contain letters (both uppercase and - lowercase), numbers (0-9), underscores (_) and dashes (-). - Field IDs must be at least 1 character long and at most 128 - characters long. Field IDs must also be unique within their - template. - tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): - Required. 
The tag template field to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_field_id: str = proto.Field( - proto.STRING, - number=2, - ) - tag_template_field: gcd_tags.TagTemplateField = proto.Field( - proto.MESSAGE, - number=3, - message=gcd_tags.TagTemplateField, - ) - - -class UpdateTagTemplateFieldRequest(proto.Message): - r"""Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template field. - tag_template_field (google.cloud.datacatalog_v1.types.TagTemplateField): - Required. The template to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Names of fields whose values to overwrite on an - individual field of a tag template. The following fields are - modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable fields - are overwritten. If such fields are non-required and omitted - in the request body, their values are emptied with one - exception: when updating an enum type, the provided values - are merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be deleted or - renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_field: gcd_tags.TagTemplateField = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.TagTemplateField, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class RenameTagTemplateFieldRequest(proto.Message): - r"""Request message for - [RenameTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template field. - new_tag_template_field_id (str): - Required. The new ID of this tag template field. For - example, ``my_new_field``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_tag_template_field_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RenameTagTemplateFieldEnumValueRequest(proto.Message): - r"""Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - - Attributes: - name (str): - Required. The name of the enum field value. - new_enum_value_display_name (str): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_enum_value_display_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteTagTemplateFieldRequest(proto.Message): - r"""Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template field - to delete. - force (bool): - Required. If true, deletes this field from any tags that use - it. - - Currently, ``true`` is the only supported value. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListTagsRequest(proto.Message): - r"""Request message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - - Attributes: - parent (str): - Required. 
The name of the Data Catalog resource to list the - tags of. - - The resource can be an - [Entry][google.cloud.datacatalog.v1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1.EntryGroup] - (without ``/entries/{entries}`` at the end). - page_size (int): - The maximum number of tags to return. Default - is 10. Maximum limit is 1000. - page_token (str): - Pagination token that specifies the next page - to return. If empty, the first page is returned. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTagsResponse(proto.Message): - r"""Response message for - [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags]. - - Attributes: - tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - [Tag][google.cloud.datacatalog.v1.Tag] details. - next_page_token (str): - Pagination token of the next results page. - Empty if there are no more items in results. - """ - - @property - def raw_page(self): - return self - - tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcd_tags.Tag, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ReconcileTagsRequest(proto.Message): - r"""Request message for - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - - Attributes: - parent (str): - Required. Name of [Entry][google.cloud.datacatalog.v1.Entry] - to be tagged. - tag_template (str): - Required. The name of the tag template, which - is used for reconciliation. - force_delete_missing (bool): - If set to ``true``, deletes entry tags related to a tag - template not listed in the tags source from an entry. If set - to ``false``, unlisted tags are retained. - tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - A list of tags to apply to an entry. A tag can specify a tag - template, which must be the template specified in the - ``ReconcileTagsRequest``. The sole entry and each of its - columns must be mentioned at most once. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template: str = proto.Field( - proto.STRING, - number=2, - ) - force_delete_missing: bool = proto.Field( - proto.BOOL, - number=3, - ) - tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=gcd_tags.Tag, - ) - - -class ReconcileTagsResponse(proto.Message): - r"""[Long-running operation][google.longrunning.Operation] response - message returned by - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - - Attributes: - created_tags_count (int): - Number of tags created in the request. - updated_tags_count (int): - Number of tags updated in the request. - deleted_tags_count (int): - Number of tags deleted in the request. - """ - - created_tags_count: int = proto.Field( - proto.INT64, - number=1, - ) - updated_tags_count: int = proto.Field( - proto.INT64, - number=2, - ) - deleted_tags_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -class ReconcileTagsMetadata(proto.Message): - r"""[Long-running operation][google.longrunning.Operation] metadata - message returned by the - [ReconcileTags][google.cloud.datacatalog.v1.DataCatalog.ReconcileTags]. - - Attributes: - state (google.cloud.datacatalog_v1.types.ReconcileTagsMetadata.ReconciliationState): - State of the reconciliation operation. 
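
# A minimal sketch, assuming the google-cloud-datacatalog client library:
# ReconcileTags runs as a long-running operation, so the client returns a future
# whose result() is the ReconcileTagsResponse above. Names are hypothetical.
from google.cloud import datacatalog_v1


def reconcile(entry_name: str, template_name: str, tags: list) -> None:
    client = datacatalog_v1.DataCatalogClient()
    operation = client.reconcile_tags(
        request=datacatalog_v1.ReconcileTagsRequest(
            parent=entry_name,
            tag_template=template_name,
            force_delete_missing=False,
            tags=tags,  # a list of datacatalog_v1.Tag messages
        )
    )
    response = operation.result()  # blocks until the operation finishes
    print("created:", response.created_tags_count)
    print("updated:", response.updated_tags_count)
    print("deleted:", response.deleted_tags_count)
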
- errors (MutableMapping[str, google.rpc.status_pb2.Status]): - Maps the name of each tagged column (or empty string for a - sole entry) to tagging operation - [status][google.rpc.Status]. - """ - class ReconciliationState(proto.Enum): - r"""Enum holding possible states of the reconciliation operation. - - Values: - RECONCILIATION_STATE_UNSPECIFIED (0): - Default value. This value is unused. - RECONCILIATION_QUEUED (1): - The reconciliation has been queued and awaits - for execution. - RECONCILIATION_IN_PROGRESS (2): - The reconciliation is in progress. - RECONCILIATION_DONE (3): - The reconciliation has been finished. - """ - RECONCILIATION_STATE_UNSPECIFIED = 0 - RECONCILIATION_QUEUED = 1 - RECONCILIATION_IN_PROGRESS = 2 - RECONCILIATION_DONE = 3 - - state: ReconciliationState = proto.Field( - proto.ENUM, - number=1, - enum=ReconciliationState, - ) - errors: MutableMapping[str, status_pb2.Status] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class ListEntriesRequest(proto.Message): - r"""Request message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - - Attributes: - parent (str): - Required. The name of the entry group that - contains the entries to list. - Can be provided in URL format. - page_size (int): - The maximum number of items to return. Default is 10. - Maximum limit is 1000. Throws an invalid argument if - ``page_size`` is more than 1000. - page_token (str): - Pagination token that specifies the next page - to return. If empty, the first page is returned. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return for each entry. If empty or omitted, - all fields are returned. - - For example, to return a list of entries with only the - ``name`` field, set ``read_mask`` to only one path with the - ``name`` value. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - - -class ListEntriesResponse(proto.Message): - r"""Response message for - [ListEntries][google.cloud.datacatalog.v1.DataCatalog.ListEntries]. - - Attributes: - entries (MutableSequence[google.cloud.datacatalog_v1.types.Entry]): - Entry details. - next_page_token (str): - Pagination token of the next results page. - Empty if there are no more items in results. - """ - - @property - def raw_page(self): - return self - - entries: MutableSequence['Entry'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entry', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class StarEntryRequest(proto.Message): - r"""Request message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - - Attributes: - name (str): - Required. The name of the entry to mark as - starred. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StarEntryResponse(proto.Message): - r"""Response message for - [StarEntry][google.cloud.datacatalog.v1.DataCatalog.StarEntry]. - Empty for now - - """ - - -class UnstarEntryRequest(proto.Message): - r"""Request message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - - Attributes: - name (str): - Required. The name of the entry to mark as **not** starred. 
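
# A minimal sketch, assuming the google-cloud-datacatalog client library: starring
# and unstarring an entry with the request messages above. The entry name is a
# hypothetical placeholder.
from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()
entry_name = (
    "projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry"
)
client.star_entry(request=datacatalog_v1.StarEntryRequest(name=entry_name))
client.unstar_entry(request=datacatalog_v1.UnstarEntryRequest(name=entry_name))
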
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UnstarEntryResponse(proto.Message): - r"""Response message for - [UnstarEntry][google.cloud.datacatalog.v1.DataCatalog.UnstarEntry]. - Empty for now - - """ - - -class ImportEntriesRequest(proto.Message): - r"""Request message for - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries] - method. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Target entry group for ingested - entries. - gcs_bucket_path (str): - Path to a Cloud Storage bucket that contains - a dump ready for ingestion. - - This field is a member of `oneof`_ ``source``. - job_id (str): - Optional. (Optional) Dataplex task job id, if - specified will be used as part of ImportEntries - LRO ID - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - gcs_bucket_path: str = proto.Field( - proto.STRING, - number=2, - oneof='source', - ) - job_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ImportEntriesResponse(proto.Message): - r"""Response message for [long-running - operation][google.longrunning.Operation] returned by the - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - upserted_entries_count (int): - Cumulative number of entries created and - entries updated as a result of import operation. - - This field is a member of `oneof`_ ``_upserted_entries_count``. - deleted_entries_count (int): - Number of entries deleted as a result of - import operation. - - This field is a member of `oneof`_ ``_deleted_entries_count``. - """ - - upserted_entries_count: int = proto.Field( - proto.INT64, - number=5, - optional=True, - ) - deleted_entries_count: int = proto.Field( - proto.INT64, - number=6, - optional=True, - ) - - -class ImportEntriesMetadata(proto.Message): - r"""Metadata message for [long-running - operation][google.longrunning.Operation] returned by the - [ImportEntries][google.cloud.datacatalog.v1.DataCatalog.ImportEntries]. - - Attributes: - state (google.cloud.datacatalog_v1.types.ImportEntriesMetadata.ImportState): - State of the import operation. - errors (MutableSequence[google.rpc.status_pb2.Status]): - Partial errors that are encountered during - the ImportEntries operation. There is no - guarantee that all the encountered errors are - reported. However, if no errors are reported, it - means that no errors were encountered. - """ - class ImportState(proto.Enum): - r"""Enum holding possible states of the import operation. - - Values: - IMPORT_STATE_UNSPECIFIED (0): - Default value. This value is unused. - IMPORT_QUEUED (1): - The dump with entries has been queued for - import. - IMPORT_IN_PROGRESS (2): - The import of entries is in progress. - IMPORT_DONE (3): - The import of entries has been finished. - IMPORT_OBSOLETE (4): - The import of entries has been abandoned in - favor of a newer request. 
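
# A minimal sketch, assuming the google-cloud-datacatalog client library:
# ImportEntries ingests a dump from Cloud Storage as a long-running operation.
# The entry group and bucket path are hypothetical placeholders.
from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()
operation = client.import_entries(
    request=datacatalog_v1.ImportEntriesRequest(
        parent="projects/my-project/locations/us-central1/entryGroups/my_group",
        gcs_bucket_path="gs://my-bucket/dump",
    )
)
result = operation.result()  # ImportEntriesResponse
print("upserted:", result.upserted_entries_count)
print("deleted:", result.deleted_entries_count)
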
- """ - IMPORT_STATE_UNSPECIFIED = 0 - IMPORT_QUEUED = 1 - IMPORT_IN_PROGRESS = 2 - IMPORT_DONE = 3 - IMPORT_OBSOLETE = 4 - - state: ImportState = proto.Field( - proto.ENUM, - number=1, - enum=ImportState, - ) - errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - - -class ModifyEntryOverviewRequest(proto.Message): - r"""Request message for - [ModifyEntryOverview][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview]. - - Attributes: - name (str): - Required. The full resource name of the - entry. - entry_overview (google.cloud.datacatalog_v1.types.EntryOverview): - Required. The new value for the Entry - Overview. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - entry_overview: 'EntryOverview' = proto.Field( - proto.MESSAGE, - number=2, - message='EntryOverview', - ) - - -class ModifyEntryContactsRequest(proto.Message): - r"""Request message for - [ModifyEntryContacts][google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts]. - - Attributes: - name (str): - Required. The full resource name of the - entry. - contacts (google.cloud.datacatalog_v1.types.Contacts): - Required. The new value for the Contacts. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - contacts: 'Contacts' = proto.Field( - proto.MESSAGE, - number=2, - message='Contacts', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py deleted file mode 100644 index 6d19a71e6562..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dataplex_spec.py +++ /dev/null @@ -1,170 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import physical_schema - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'DataplexSpec', - 'DataplexFilesetSpec', - 'DataplexTableSpec', - 'DataplexExternalTable', - }, -) - - -class DataplexSpec(proto.Message): - r"""Common Dataplex fields. - - Attributes: - asset (str): - Fully qualified resource name of an asset in - Dataplex, to which the underlying data source - (Cloud Storage bucket or BigQuery dataset) of - the entity is attached. - data_format (google.cloud.datacatalog_v1.types.PhysicalSchema): - Format of the data. - compression_format (str): - Compression format of the data, e.g., zip, - gzip etc. - project_id (str): - Project ID of the underlying Cloud Storage or - BigQuery data. Note that this may not be the - same project as the correspondingly Dataplex - lake / zone / asset. 
-    """
-
-    asset: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    data_format: physical_schema.PhysicalSchema = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=physical_schema.PhysicalSchema,
-    )
-    compression_format: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class DataplexFilesetSpec(proto.Message):
-    r"""Entry specification for a Dataplex fileset.
-
-    Attributes:
-        dataplex_spec (google.cloud.datacatalog_v1.types.DataplexSpec):
-            Common Dataplex fields.
-    """
-
-    dataplex_spec: 'DataplexSpec' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='DataplexSpec',
-    )
-
-
-class DataplexTableSpec(proto.Message):
-    r"""Entry specification for a Dataplex table.
-
-    Attributes:
-        external_tables (MutableSequence[google.cloud.datacatalog_v1.types.DataplexExternalTable]):
-            List of external tables registered by
-            Dataplex in other systems based on the same
-            underlying data.
-
-            External tables allow querying this data in
-            those systems.
-        dataplex_spec (google.cloud.datacatalog_v1.types.DataplexSpec):
-            Common Dataplex fields.
-        user_managed (bool):
-            Indicates if the table schema is managed by
-            the user or not.
-    """
-
-    external_tables: MutableSequence['DataplexExternalTable'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='DataplexExternalTable',
-    )
-    dataplex_spec: 'DataplexSpec' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='DataplexSpec',
-    )
-    user_managed: bool = proto.Field(
-        proto.BOOL,
-        number=3,
-    )
-
-
-class DataplexExternalTable(proto.Message):
-    r"""External table registered by Dataplex.
-    Dataplex publishes data discovered from an asset into multiple
-    other systems (BigQuery, DPMS) in the form of tables. We call them
-    "external tables". External tables are also synced into the Data
-    Catalog.
-    This message contains pointers to
-    those external tables (fully qualified name, resource name et
-    cetera) within the Data Catalog.
-
-    Attributes:
-        system (google.cloud.datacatalog_v1.types.IntegratedSystem):
-            Service in which the external table is
-            registered.
-        fully_qualified_name (str):
-            Fully qualified name (FQN) of the external
-            table.
-        google_cloud_resource (str):
-            Google Cloud resource name of the external
-            table.
-        data_catalog_entry (str):
-            Name of the Data Catalog entry representing
-            the external table.
-    """
-
-    system: common.IntegratedSystem = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum=common.IntegratedSystem,
-    )
-    fully_qualified_name: str = proto.Field(
-        proto.STRING,
-        number=28,
-    )
-    google_cloud_resource: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    data_catalog_entry: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py
deleted file mode 100644
index 2f859e874e94..000000000000
--- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/dump_content.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import tags - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'TaggedEntry', - 'DumpItem', - }, -) - - -class TaggedEntry(proto.Message): - r"""Wrapper containing Entry and information about Tags - that should and should not be attached to it. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - v1_entry (google.cloud.datacatalog_v1.types.Entry): - Non-encrypted Data Catalog v1 Entry. - - This field is a member of `oneof`_ ``entry``. - present_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - Optional. Tags that should be ingested into - the Data Catalog. Caller should populate - template name, column and fields. - absent_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - Optional. Tags that should be deleted from - the Data Catalog. Caller should populate - template name and column only. - """ - - v1_entry: datacatalog.Entry = proto.Field( - proto.MESSAGE, - number=1, - oneof='entry', - message=datacatalog.Entry, - ) - present_tags: MutableSequence[tags.Tag] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=tags.Tag, - ) - absent_tags: MutableSequence[tags.Tag] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=tags.Tag, - ) - - -class DumpItem(proto.Message): - r"""Wrapper for any item that can be contained in the dump. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - tagged_entry (google.cloud.datacatalog_v1.types.TaggedEntry): - Entry and its tags. - - This field is a member of `oneof`_ ``item``. - """ - - tagged_entry: 'TaggedEntry' = proto.Field( - proto.MESSAGE, - number=1, - oneof='item', - message='TaggedEntry', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py deleted file mode 100644 index e6cdd35f90f3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/gcs_fileset_spec.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import timestamps - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'GcsFilesetSpec', - 'GcsFileSpec', - }, -) - - -class GcsFilesetSpec(proto.Message): - r"""Describes a Cloud Storage fileset entry. - - Attributes: - file_patterns (MutableSequence[str]): - Required. Patterns to identify a set of files in Google - Cloud Storage. - - For more information, see [Wildcard Names] - (https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames). - - Note: Currently, bucket wildcards are not supported. - - Examples of valid ``file_patterns``: - - - ``gs://bucket_name/dir/*``: matches all files in - ``bucket_name/dir`` directory - - ``gs://bucket_name/dir/**``: matches all files in - ``bucket_name/dir`` and all subdirectories - - ``gs://bucket_name/file*``: matches files prefixed by - ``file`` in ``bucket_name`` - - ``gs://bucket_name/??.txt``: matches files with two - characters followed by ``.txt`` in ``bucket_name`` - - ``gs://bucket_name/[aeiou].txt``: matches files that - contain a single vowel character followed by ``.txt`` in - ``bucket_name`` - - ``gs://bucket_name/[a-m].txt``: matches files that - contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` - in ``bucket_name`` - - ``gs://bucket_name/a/*/b``: matches all files in - ``bucket_name`` that match the ``a/*/b`` pattern, such as - ``a/c/b``, ``a/d/b`` - - ``gs://another_bucket/a.txt``: matches - ``gs://another_bucket/a.txt`` - - You can combine wildcards to match complex sets of files, - for example: - - ``gs://bucket_name/[a-m]??.j*g`` - sample_gcs_file_specs (MutableSequence[google.cloud.datacatalog_v1.types.GcsFileSpec]): - Output only. Sample files contained in this - fileset, not all files contained in this fileset - are represented here. - """ - - file_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - sample_gcs_file_specs: MutableSequence['GcsFileSpec'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='GcsFileSpec', - ) - - -class GcsFileSpec(proto.Message): - r"""Specification of a single file in Cloud Storage. - - Attributes: - file_path (str): - Required. Full file path. Example: - ``gs://bucket_name/a/b.txt``. - gcs_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): - Output only. Creation, modification, and - expiration timestamps of a Cloud Storage file. - size_bytes (int): - Output only. File size in bytes. - """ - - file_path: str = proto.Field( - proto.STRING, - number=1, - ) - gcs_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=2, - message=timestamps.SystemTimestamps, - ) - size_bytes: int = proto.Field( - proto.INT64, - number=4, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py deleted file mode 100644 index 82d77736ed3b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/physical_schema.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'PhysicalSchema', - }, -) - - -class PhysicalSchema(proto.Message): - r"""Native schema used by a resource represented as an entry. - Used by query engines for deserializing and parsing source data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - avro (google.cloud.datacatalog_v1.types.PhysicalSchema.AvroSchema): - Schema in Avro JSON format. - - This field is a member of `oneof`_ ``schema``. - thrift (google.cloud.datacatalog_v1.types.PhysicalSchema.ThriftSchema): - Schema in Thrift format. - - This field is a member of `oneof`_ ``schema``. - protobuf (google.cloud.datacatalog_v1.types.PhysicalSchema.ProtobufSchema): - Schema in protocol buffer format. - - This field is a member of `oneof`_ ``schema``. - parquet (google.cloud.datacatalog_v1.types.PhysicalSchema.ParquetSchema): - Marks a Parquet-encoded data source. - - This field is a member of `oneof`_ ``schema``. - orc (google.cloud.datacatalog_v1.types.PhysicalSchema.OrcSchema): - Marks an ORC-encoded data source. - - This field is a member of `oneof`_ ``schema``. - csv (google.cloud.datacatalog_v1.types.PhysicalSchema.CsvSchema): - Marks a CSV-encoded data source. - - This field is a member of `oneof`_ ``schema``. - """ - - class AvroSchema(proto.Message): - r"""Schema in Avro JSON format. - - Attributes: - text (str): - JSON source of the Avro schema. - """ - - text: str = proto.Field( - proto.STRING, - number=1, - ) - - class ThriftSchema(proto.Message): - r"""Schema in Thrift format. - - Attributes: - text (str): - Thrift IDL source of the schema. - """ - - text: str = proto.Field( - proto.STRING, - number=1, - ) - - class ProtobufSchema(proto.Message): - r"""Schema in protocol buffer format. - - Attributes: - text (str): - Protocol buffer source of the schema. - """ - - text: str = proto.Field( - proto.STRING, - number=1, - ) - - class ParquetSchema(proto.Message): - r"""Marks a Parquet-encoded data source. - """ - - class OrcSchema(proto.Message): - r"""Marks an ORC-encoded data source. - """ - - class CsvSchema(proto.Message): - r"""Marks a CSV-encoded data source. 
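
# A minimal sketch, assuming the google-cloud-datacatalog client library: building
# a PhysicalSchema value. Only one member of the ``schema`` oneof can be set, so
# assigning ``avro`` here leaves the other members cleared.
from google.cloud import datacatalog_v1

avro_schema = datacatalog_v1.PhysicalSchema(
    avro=datacatalog_v1.PhysicalSchema.AvroSchema(
        text='{"type": "record", "name": "example", "fields": []}'
    )
)
print(avro_schema)
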
- """ - - avro: AvroSchema = proto.Field( - proto.MESSAGE, - number=1, - oneof='schema', - message=AvroSchema, - ) - thrift: ThriftSchema = proto.Field( - proto.MESSAGE, - number=2, - oneof='schema', - message=ThriftSchema, - ) - protobuf: ProtobufSchema = proto.Field( - proto.MESSAGE, - number=3, - oneof='schema', - message=ProtobufSchema, - ) - parquet: ParquetSchema = proto.Field( - proto.MESSAGE, - number=4, - oneof='schema', - message=ParquetSchema, - ) - orc: OrcSchema = proto.Field( - proto.MESSAGE, - number=5, - oneof='schema', - message=OrcSchema, - ) - csv: CsvSchema = proto.Field( - proto.MESSAGE, - number=6, - oneof='schema', - message=CsvSchema, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py deleted file mode 100644 index 4624c46cb4b5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanager.py +++ /dev/null @@ -1,551 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import timestamps -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'Taxonomy', - 'PolicyTag', - 'CreateTaxonomyRequest', - 'DeleteTaxonomyRequest', - 'UpdateTaxonomyRequest', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'GetTaxonomyRequest', - 'CreatePolicyTagRequest', - 'DeletePolicyTagRequest', - 'UpdatePolicyTagRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'GetPolicyTagRequest', - }, -) - - -class Taxonomy(proto.Message): - r"""A taxonomy is a collection of hierarchical policy tags that classify - data along a common axis. - - For example, a "data sensitivity" taxonomy might contain the - following policy tags: - - :: - - + PII - + Account number - + Age - + SSN - + Zipcode - + Financials - + Revenue - - A "data origin" taxonomy might contain the following policy tags: - - :: - - + User data - + Employee data - + Partner data - + Public data - - Attributes: - name (str): - Output only. Resource name of this taxonomy - in URL format. - Note: Policy tag manager generates unique - taxonomy IDs. - display_name (str): - Required. User-defined name of this taxonomy. - - The name can't start or end with spaces, must - contain only Unicode letters, numbers, - underscores, dashes, and spaces, and be at most - 200 bytes long when encoded in UTF-8. - - The taxonomy display name must be unique within - an organization. - description (str): - Optional. Description of this taxonomy. If - not set, defaults to empty. 
- The description must contain only Unicode - characters, tabs, newlines, carriage returns, - and page breaks, and be at most 2000 bytes long - when encoded in UTF-8. - policy_tag_count (int): - Output only. Number of policy tags in this - taxonomy. - taxonomy_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): - Output only. Creation and modification - timestamps of this taxonomy. - activated_policy_types (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): - Optional. A list of policy types that are - activated for this taxonomy. If not set, - defaults to an empty list. - service (google.cloud.datacatalog_v1.types.Taxonomy.Service): - Output only. Identity of the service which - owns the Taxonomy. This field is only populated - when the taxonomy is created by a Google Cloud - service. Currently only 'DATAPLEX' is supported. - """ - class PolicyType(proto.Enum): - r"""Defines policy types where the policy tags can be used for. - - Values: - POLICY_TYPE_UNSPECIFIED (0): - Unspecified policy type. - FINE_GRAINED_ACCESS_CONTROL (1): - Fine-grained access control policy that - enables access control on tagged sub-resources. - """ - POLICY_TYPE_UNSPECIFIED = 0 - FINE_GRAINED_ACCESS_CONTROL = 1 - - class Service(proto.Message): - r"""The source system of the Taxonomy. - - Attributes: - name (google.cloud.datacatalog_v1.types.ManagingSystem): - The Google Cloud service name. - identity (str): - The service agent for the service. - """ - - name: common.ManagingSystem = proto.Field( - proto.ENUM, - number=1, - enum=common.ManagingSystem, - ) - identity: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - policy_tag_count: int = proto.Field( - proto.INT32, - number=4, - ) - taxonomy_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=5, - message=timestamps.SystemTimestamps, - ) - activated_policy_types: MutableSequence[PolicyType] = proto.RepeatedField( - proto.ENUM, - number=6, - enum=PolicyType, - ) - service: Service = proto.Field( - proto.MESSAGE, - number=7, - message=Service, - ) - - -class PolicyTag(proto.Message): - r"""Denotes one policy tag in a taxonomy, for example, SSN. - - Policy tags can be defined in a hierarchy. For example: - - :: - - + Geolocation - + LatLong - + City - + ZipCode - - Where the "Geolocation" policy tag contains three children. - - Attributes: - name (str): - Output only. Resource name of this policy tag - in the URL format. - The policy tag manager generates unique taxonomy - IDs and policy tag IDs. - display_name (str): - Required. User-defined name of this policy - tag. - The name can't start or end with spaces and must - be unique within the parent taxonomy, contain - only Unicode letters, numbers, underscores, - dashes and spaces, and be at most 200 bytes long - when encoded in UTF-8. - description (str): - Description of this policy tag. If not set, - defaults to empty. - The description must contain only Unicode - characters, tabs, newlines, carriage returns and - page breaks, and be at most 2000 bytes long when - encoded in UTF-8. - parent_policy_tag (str): - Resource name of this policy tag's parent - policy tag. If empty, this is a top level tag. - If not set, defaults to an empty string. 
- - For example, for the "LatLong" policy tag in the - example above, this field contains the resource - name of the "Geolocation" policy tag, and, for - "Geolocation", this field is empty. - child_policy_tags (MutableSequence[str]): - Output only. Resource names of child policy - tags of this policy tag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - parent_policy_tag: str = proto.Field( - proto.STRING, - number=4, - ) - child_policy_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class CreateTaxonomyRequest(proto.Message): - r"""Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy]. - - Attributes: - parent (str): - Required. Resource name of the project that - the taxonomy will belong to. - taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): - The taxonomy to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - taxonomy: 'Taxonomy' = proto.Field( - proto.MESSAGE, - number=2, - message='Taxonomy', - ) - - -class DeleteTaxonomyRequest(proto.Message): - r"""Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy]. - - Attributes: - name (str): - Required. Resource name of the taxonomy to - delete. - Note: All policy tags in this taxonomy are also - deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTaxonomyRequest(proto.Message): - r"""Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy]. - - Attributes: - taxonomy (google.cloud.datacatalog_v1.types.Taxonomy): - The taxonomy to update. You can update only - its description, display name, and activated - policy types. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Specifies fields to update. If not set, defaults to all - fields you can update. - - For more information, see [FieldMask] - (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). - """ - - taxonomy: 'Taxonomy' = proto.Field( - proto.MESSAGE, - number=1, - message='Taxonomy', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListTaxonomiesRequest(proto.Message): - r"""Request message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. - - Attributes: - parent (str): - Required. Resource name of the project to - list the taxonomies of. - page_size (int): - The maximum number of items to return. Must - be a value between 1 and 1000 inclusively. If - not set, defaults to 50. - page_token (str): - The pagination token of the next results - page. If not set, the first page is returned. - - The token is returned in the response to a - previous list request. - filter (str): - Supported field for filter is 'service' and - value is 'dataplex'. Eg: service=dataplex. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTaxonomiesResponse(proto.Message): - r"""Response message for - [ListTaxonomies][google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies]. 
- - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy]): - Taxonomies that the project contains. - next_page_token (str): - Pagination token of the next results page. - Empty if there are no more results in the list. - """ - - @property - def raw_page(self): - return self - - taxonomies: MutableSequence['Taxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Taxonomy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTaxonomyRequest(proto.Message): - r"""Request message for - [GetTaxonomy][google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy]. - - Attributes: - name (str): - Required. Resource name of the taxonomy to - get. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreatePolicyTagRequest(proto.Message): - r"""Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag]. - - Attributes: - parent (str): - Required. Resource name of the taxonomy that - the policy tag will belong to. - policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): - The policy tag to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - policy_tag: 'PolicyTag' = proto.Field( - proto.MESSAGE, - number=2, - message='PolicyTag', - ) - - -class DeletePolicyTagRequest(proto.Message): - r"""Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag]. - - Attributes: - name (str): - Required. Resource name of the policy tag to - delete. - Note: All of its descendant policy tags are also - deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdatePolicyTagRequest(proto.Message): - r"""Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag]. - - Attributes: - policy_tag (google.cloud.datacatalog_v1.types.PolicyTag): - The policy tag to update. You can update only - its description, display name, and parent policy - tag fields. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Specifies the fields to update. - - You can update only display name, description, and parent - policy tag. If not set, defaults to all updatable fields. - For more information, see [FieldMask] - (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask). - """ - - policy_tag: 'PolicyTag' = proto.Field( - proto.MESSAGE, - number=1, - message='PolicyTag', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListPolicyTagsRequest(proto.Message): - r"""Request message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. - - Attributes: - parent (str): - Required. Resource name of the taxonomy to - list the policy tags of. - page_size (int): - The maximum number of items to return. Must - be a value between 1 and 1000 inclusively. - If not set, defaults to 50. - page_token (str): - The pagination token of the next results - page. If not set, returns the first page. - - The token is returned in the response to a - previous list request. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListPolicyTagsResponse(proto.Message): - r"""Response message for - [ListPolicyTags][google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags]. 
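
# A minimal sketch, assuming the google-cloud-datacatalog client library: creating
# a taxonomy and a policy tag with the request messages above. The project and
# location are hypothetical placeholders.
from google.cloud import datacatalog_v1

client = datacatalog_v1.PolicyTagManagerClient()
taxonomy = client.create_taxonomy(
    request=datacatalog_v1.CreateTaxonomyRequest(
        parent="projects/my-project/locations/us-central1",
        taxonomy=datacatalog_v1.Taxonomy(display_name="Data sensitivity"),
    )
)
policy_tag = client.create_policy_tag(
    request=datacatalog_v1.CreatePolicyTagRequest(
        parent=taxonomy.name,
        policy_tag=datacatalog_v1.PolicyTag(display_name="PII"),
    )
)
print(policy_tag.name)
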
- - Attributes: - policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.PolicyTag]): - The policy tags that belong to the taxonomy. - next_page_token (str): - Pagination token of the next results page. - Empty if there are no more results in the list. - """ - - @property - def raw_page(self): - return self - - policy_tags: MutableSequence['PolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PolicyTag', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetPolicyTagRequest(proto.Message): - r"""Request message for - [GetPolicyTag][google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag]. - - Attributes: - name (str): - Required. Resource name of the policy tag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py deleted file mode 100644 index 1ed5a73de618..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/policytagmanagerserialization.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import policytagmanager - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'SerializedTaxonomy', - 'SerializedPolicyTag', - 'ReplaceTaxonomyRequest', - 'ImportTaxonomiesRequest', - 'InlineSource', - 'CrossRegionalSource', - 'ImportTaxonomiesResponse', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - }, -) - - -class SerializedTaxonomy(proto.Message): - r"""A nested protocol buffer that represents a taxonomy and the - hierarchy of its policy tags. Used for taxonomy replacement, - import, and export. - - Attributes: - display_name (str): - Required. Display name of the taxonomy. At - most 200 bytes when encoded in UTF-8. - description (str): - Description of the serialized taxonomy. At - most 2000 bytes when encoded in UTF-8. If not - set, defaults to an empty description. - policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): - Top level policy tags associated with the - taxonomy, if any. - activated_policy_types (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy.PolicyType]): - A list of policy types that are activated per - taxonomy. 
- """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='SerializedPolicyTag', - ) - activated_policy_types: MutableSequence[policytagmanager.Taxonomy.PolicyType] = proto.RepeatedField( - proto.ENUM, - number=4, - enum=policytagmanager.Taxonomy.PolicyType, - ) - - -class SerializedPolicyTag(proto.Message): - r"""A nested protocol buffer that represents a policy tag and all - its descendants. - - Attributes: - policy_tag (str): - Resource name of the policy tag. - - This field is ignored when calling ``ImportTaxonomies``. - display_name (str): - Required. Display name of the policy tag. At - most 200 bytes when encoded in UTF-8. - description (str): - Description of the serialized policy tag. At - most 2000 bytes when encoded in UTF-8. If not - set, defaults to an empty description. - child_policy_tags (MutableSequence[google.cloud.datacatalog_v1.types.SerializedPolicyTag]): - Children of the policy tag, if any. - """ - - policy_tag: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - child_policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='SerializedPolicyTag', - ) - - -class ReplaceTaxonomyRequest(proto.Message): - r"""Request message for - [ReplaceTaxonomy][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy]. - - Attributes: - name (str): - Required. Resource name of the taxonomy to - update. - serialized_taxonomy (google.cloud.datacatalog_v1.types.SerializedTaxonomy): - Required. Taxonomy to update along with its - child policy tags. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - serialized_taxonomy: 'SerializedTaxonomy' = proto.Field( - proto.MESSAGE, - number=2, - message='SerializedTaxonomy', - ) - - -class ImportTaxonomiesRequest(proto.Message): - r"""Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Resource name of project that the - imported taxonomies will belong to. - inline_source (google.cloud.datacatalog_v1.types.InlineSource): - Inline source taxonomy to import. - - This field is a member of `oneof`_ ``source``. - cross_regional_source (google.cloud.datacatalog_v1.types.CrossRegionalSource): - Cross-regional source taxonomy to import. - - This field is a member of `oneof`_ ``source``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inline_source: 'InlineSource' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='InlineSource', - ) - cross_regional_source: 'CrossRegionalSource' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CrossRegionalSource', - ) - - -class InlineSource(proto.Message): - r"""Inline source containing taxonomies to import. 
- - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): - Required. Taxonomies to import. - """ - - taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SerializedTaxonomy', - ) - - -class CrossRegionalSource(proto.Message): - r"""Cross-regional source used to import an existing taxonomy - into a different region. - - Attributes: - taxonomy (str): - Required. The resource name of the source - taxonomy to import. - """ - - taxonomy: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ImportTaxonomiesResponse(proto.Message): - r"""Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies]. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.Taxonomy]): - Imported taxonomies. - """ - - taxonomies: MutableSequence[policytagmanager.Taxonomy] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=policytagmanager.Taxonomy, - ) - - -class ExportTaxonomiesRequest(proto.Message): - r"""Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Resource name of the project that - the exported taxonomies belong to. - taxonomies (MutableSequence[str]): - Required. Resource names of the taxonomies to - export. - serialized_taxonomies (bool): - Serialized export taxonomies that contain all - the policy tags as nested protocol buffers. - - This field is a member of `oneof`_ ``destination``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - taxonomies: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - serialized_taxonomies: bool = proto.Field( - proto.BOOL, - number=3, - oneof='destination', - ) - - -class ExportTaxonomiesResponse(proto.Message): - r"""Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies]. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1.types.SerializedTaxonomy]): - List of taxonomies and policy tags as nested - protocol buffers. - """ - - taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SerializedTaxonomy', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py deleted file mode 100644 index 3a82e77b213e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/schema.py +++ /dev/null @@ -1,204 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
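Export mirrors import: the request names the taxonomies to export and selects the output form through the destination oneof (serialized_taxonomies is the member shown here). A sketch with hypothetical resource names:

from google.cloud import datacatalog_v1

client = datacatalog_v1.PolicyTagManagerSerializationClient()

request = datacatalog_v1.ExportTaxonomiesRequest(
    parent="projects/my-project/locations/us",  # hypothetical
    taxonomies=["projects/my-project/locations/us/taxonomies/123"],
    serialized_taxonomies=True,
)

response = client.export_taxonomies(request=request)
for taxonomy in response.taxonomies:
    # Each result is a SerializedTaxonomy with its nested policy tags.
    print(taxonomy.display_name, len(taxonomy.policy_tags))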
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'Schema', - 'ColumnSchema', - }, -) - - -class Schema(proto.Message): - r"""Represents a schema, for example, a BigQuery, GoogleSQL, or - Avro schema. - - Attributes: - columns (MutableSequence[google.cloud.datacatalog_v1.types.ColumnSchema]): - The unified GoogleSQL-like schema of columns. - - The overall maximum number of columns and nested - columns is 10,000. The maximum nested depth is - 15 levels. - """ - - columns: MutableSequence['ColumnSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ColumnSchema', - ) - - -class ColumnSchema(proto.Message): - r"""A column within a schema. Columns can be nested inside - other columns. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - column (str): - Required. Name of the column. - - Must be a UTF-8 string without dots (.). - The maximum size is 64 bytes. - type_ (str): - Required. Type of the column. - - Must be a UTF-8 string with the maximum size of - 128 bytes. - description (str): - Optional. Description of the column. Default - value is an empty string. - The description must be a UTF-8 string with the - maximum size of 2000 bytes. - mode (str): - Optional. A column's mode indicates whether values in this - column are required, nullable, or repeated. - - Only ``NULLABLE``, ``REQUIRED``, and ``REPEATED`` values are - supported. Default mode is ``NULLABLE``. - default_value (str): - Optional. Default value for the column. - ordinal_position (int): - Optional. Ordinal position - highest_indexing_type (google.cloud.datacatalog_v1.types.ColumnSchema.IndexingType): - Optional. Most important inclusion of this - column. - subcolumns (MutableSequence[google.cloud.datacatalog_v1.types.ColumnSchema]): - Optional. Schema of sub-columns. A column can - have zero or more sub-columns. - looker_column_spec (google.cloud.datacatalog_v1.types.ColumnSchema.LookerColumnSpec): - Looker specific column info of this column. - - This field is a member of `oneof`_ ``system_spec``. - gc_rule (str): - Optional. Garbage collection policy for the - column or column family. Applies to systems like - Cloud Bigtable. - """ - class IndexingType(proto.Enum): - r"""Specifies inclusion of the column in an index - - Values: - INDEXING_TYPE_UNSPECIFIED (0): - Unspecified. - INDEXING_TYPE_NONE (1): - Column not a part of an index. - INDEXING_TYPE_NON_UNIQUE (2): - Column Part of non unique index. - INDEXING_TYPE_UNIQUE (3): - Column part of unique index. - INDEXING_TYPE_PRIMARY_KEY (4): - Column part of the primary key. - """ - INDEXING_TYPE_UNSPECIFIED = 0 - INDEXING_TYPE_NONE = 1 - INDEXING_TYPE_NON_UNIQUE = 2 - INDEXING_TYPE_UNIQUE = 3 - INDEXING_TYPE_PRIMARY_KEY = 4 - - class LookerColumnSpec(proto.Message): - r"""Column info specific to Looker System. - - Attributes: - type_ (google.cloud.datacatalog_v1.types.ColumnSchema.LookerColumnSpec.LookerColumnType): - Looker specific column type of this column. - """ - class LookerColumnType(proto.Enum): - r"""Column type in Looker. - - Values: - LOOKER_COLUMN_TYPE_UNSPECIFIED (0): - Unspecified. - DIMENSION (1): - Dimension. - DIMENSION_GROUP (2): - Dimension group - parent for Dimension. - FILTER (3): - Filter. - MEASURE (4): - Measure. - PARAMETER (5): - Parameter. 
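A Schema is a list of (possibly nested) ColumnSchema messages: nesting goes through subcolumns, and mode follows the NULLABLE/REQUIRED/REPEATED convention. A construction sketch; the type names are BigQuery-style assumptions, and the entry it is attached to is hypothetical:

from google.cloud import datacatalog_v1

schema = datacatalog_v1.Schema(
    columns=[
        datacatalog_v1.ColumnSchema(
            column="address",
            type_="RECORD",      # type names depend on the source system
            mode="NULLABLE",
            subcolumns=[
                datacatalog_v1.ColumnSchema(column="city", type_="STRING", mode="NULLABLE"),
                datacatalog_v1.ColumnSchema(column="zip", type_="STRING", mode="REQUIRED"),
            ],
        ),
    ]
)

# The schema can then be set on an Entry before create_entry/update_entry.
entry = datacatalog_v1.Entry()
entry.schema = schema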
- """ - LOOKER_COLUMN_TYPE_UNSPECIFIED = 0 - DIMENSION = 1 - DIMENSION_GROUP = 2 - FILTER = 3 - MEASURE = 4 - PARAMETER = 5 - - type_: 'ColumnSchema.LookerColumnSpec.LookerColumnType' = proto.Field( - proto.ENUM, - number=1, - enum='ColumnSchema.LookerColumnSpec.LookerColumnType', - ) - - column: str = proto.Field( - proto.STRING, - number=6, - ) - type_: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - mode: str = proto.Field( - proto.STRING, - number=3, - ) - default_value: str = proto.Field( - proto.STRING, - number=8, - ) - ordinal_position: int = proto.Field( - proto.INT32, - number=9, - ) - highest_indexing_type: IndexingType = proto.Field( - proto.ENUM, - number=10, - enum=IndexingType, - ) - subcolumns: MutableSequence['ColumnSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ColumnSchema', - ) - looker_column_spec: LookerColumnSpec = proto.Field( - proto.MESSAGE, - number=18, - oneof='system_spec', - message=LookerColumnSpec, - ) - gc_rule: str = proto.Field( - proto.STRING, - number=11, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py deleted file mode 100644 index 56747a5be5a1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/search.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1.types import common -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'SearchResultType', - 'SearchCatalogResult', - }, -) - - -class SearchResultType(proto.Enum): - r"""The resource types that can be returned in search results. - - Values: - SEARCH_RESULT_TYPE_UNSPECIFIED (0): - Default unknown type. - ENTRY (1): - An [Entry][google.cloud.datacatalog.v1.Entry]. - TAG_TEMPLATE (2): - A [TagTemplate][google.cloud.datacatalog.v1.TagTemplate]. - ENTRY_GROUP (3): - An [EntryGroup][google.cloud.datacatalog.v1.EntryGroup]. - """ - SEARCH_RESULT_TYPE_UNSPECIFIED = 0 - ENTRY = 1 - TAG_TEMPLATE = 2 - ENTRY_GROUP = 3 - - -class SearchCatalogResult(proto.Message): - r"""Result in the response to a search request. - - Each result captures details of one entry that matches the - search. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - search_result_type (google.cloud.datacatalog_v1.types.SearchResultType): - Type of the search result. - - You can use this field to determine which get - method to call to fetch the full resource. - search_result_subtype (str): - Sub-type of the search result. - - A dot-delimited full type of the resource. The same type you - specify in the ``type`` search predicate. - - Examples: ``entry.table``, ``entry.dataStream``, - ``tagTemplate``. - relative_resource_name (str): - The relative name of the resource in URL format. - - Examples: - - - ``projects/{PROJECT_ID}/locations/{LOCATION_ID}/entryGroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}`` - - ``projects/{PROJECT_ID}/tagTemplates/{TAG_TEMPLATE_ID}`` - linked_resource (str): - The full name of the Google Cloud resource the entry belongs - to. - - For more information, see [Full Resource Name] - (/apis/design/resource_names#full_resource_name). - - Example: - - ``//bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID`` - modify_time (google.protobuf.timestamp_pb2.Timestamp): - The last modification timestamp of the entry - in the source system. - integrated_system (google.cloud.datacatalog_v1.types.IntegratedSystem): - Output only. The source system that Data - Catalog automatically integrates with, such as - BigQuery, Cloud Pub/Sub, or Dataproc Metastore. - - This field is a member of `oneof`_ ``system``. - user_specified_system (str): - Custom source system that you can manually - integrate Data Catalog with. - - This field is a member of `oneof`_ ``system``. - fully_qualified_name (str): - Fully qualified name (FQN) of the resource. - - FQNs take two forms: - - - For non-regionalized resources: - - ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - - For regionalized resources: - - ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - Example for a DPMS table: - - ``dataproc_metastore:PROJECT_ID.LOCATION_ID.INSTANCE_ID.DATABASE_ID.TABLE_ID`` - display_name (str): - The display name of the result. - description (str): - Entry description that can consist of several - sentences or paragraphs that describe entry - contents. 
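In practice these fields arrive on results yielded by DataCatalogClient.search_catalog: relative_resource_name is what you pass to the matching get method, while linked_resource points back at the underlying Cloud resource. A sketch, assuming a hypothetical project in the search scope:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()

scope = datacatalog_v1.SearchCatalogRequest.Scope(
    include_project_ids=["my-project"],  # hypothetical
)
request = datacatalog_v1.SearchCatalogRequest(scope=scope, query="sales")

for result in client.search_catalog(request=request):
    # search_result_type indicates which get method returns the full resource.
    print(result.search_result_type, result.relative_resource_name)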
- """ - - search_result_type: 'SearchResultType' = proto.Field( - proto.ENUM, - number=1, - enum='SearchResultType', - ) - search_result_subtype: str = proto.Field( - proto.STRING, - number=2, - ) - relative_resource_name: str = proto.Field( - proto.STRING, - number=3, - ) - linked_resource: str = proto.Field( - proto.STRING, - number=4, - ) - modify_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - integrated_system: common.IntegratedSystem = proto.Field( - proto.ENUM, - number=8, - oneof='system', - enum=common.IntegratedSystem, - ) - user_specified_system: str = proto.Field( - proto.STRING, - number=9, - oneof='system', - ) - fully_qualified_name: str = proto.Field( - proto.STRING, - number=10, - ) - display_name: str = proto.Field( - proto.STRING, - number=12, - ) - description: str = proto.Field( - proto.STRING, - number=13, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py deleted file mode 100644 index 0441bde6862a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/table_spec.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'TableSourceType', - 'BigQueryTableSpec', - 'ViewSpec', - 'TableSpec', - 'BigQueryDateShardedSpec', - }, -) - - -class TableSourceType(proto.Enum): - r"""Table source type. - - Values: - TABLE_SOURCE_TYPE_UNSPECIFIED (0): - Default unknown type. - BIGQUERY_VIEW (2): - Table view. - BIGQUERY_TABLE (5): - BigQuery native table. - BIGQUERY_MATERIALIZED_VIEW (7): - BigQuery materialized view. - """ - TABLE_SOURCE_TYPE_UNSPECIFIED = 0 - BIGQUERY_VIEW = 2 - BIGQUERY_TABLE = 5 - BIGQUERY_MATERIALIZED_VIEW = 7 - - -class BigQueryTableSpec(proto.Message): - r"""Describes a BigQuery table. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table_source_type (google.cloud.datacatalog_v1.types.TableSourceType): - Output only. The table source type. - view_spec (google.cloud.datacatalog_v1.types.ViewSpec): - Table view specification. Populated only if the - ``table_source_type`` is ``BIGQUERY_VIEW``. - - This field is a member of `oneof`_ ``type_spec``. - table_spec (google.cloud.datacatalog_v1.types.TableSpec): - Specification of a BigQuery table. 
Populated only if the - ``table_source_type`` is ``BIGQUERY_TABLE``. - - This field is a member of `oneof`_ ``type_spec``. - """ - - table_source_type: 'TableSourceType' = proto.Field( - proto.ENUM, - number=1, - enum='TableSourceType', - ) - view_spec: 'ViewSpec' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type_spec', - message='ViewSpec', - ) - table_spec: 'TableSpec' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type_spec', - message='TableSpec', - ) - - -class ViewSpec(proto.Message): - r"""Table view specification. - - Attributes: - view_query (str): - Output only. The query that defines the table - view. - """ - - view_query: str = proto.Field( - proto.STRING, - number=1, - ) - - -class TableSpec(proto.Message): - r"""Normal BigQuery table specification. - - Attributes: - grouped_entry (str): - Output only. If the table is date-sharded, that is, it - matches the ``[prefix]YYYYMMDD`` name pattern, this field is - the Data Catalog resource name of the date-sharded grouped - entry. For example: - - ``projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}``. - - Otherwise, ``grouped_entry`` is empty. - """ - - grouped_entry: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryDateShardedSpec(proto.Message): - r"""Specification for a group of BigQuery tables with the - ``[prefix]YYYYMMDD`` name pattern. - - For more information, see [Introduction to partitioned tables] - (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). - - Attributes: - dataset (str): - Output only. The Data Catalog resource name of the dataset - entry the current table belongs to. For example: - - ``projects/{PROJECT_ID}/locations/{LOCATION}/entrygroups/{ENTRY_GROUP_ID}/entries/{ENTRY_ID}``. - table_prefix (str): - Output only. The table name prefix of the shards. - - The name of any given shard is ``[table_prefix]YYYYMMDD``. - For example, for the ``MyTable20180101`` shard, the - ``table_prefix`` is ``MyTable``. - shard_count (int): - Output only. Total number of shards. - latest_shard_resource (str): - Output only. BigQuery resource name of the - latest shard. - """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - table_prefix: str = proto.Field( - proto.STRING, - number=2, - ) - shard_count: int = proto.Field( - proto.INT64, - number=3, - ) - latest_shard_resource: str = proto.Field( - proto.STRING, - number=4, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py deleted file mode 100644 index b50679727e69..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/tags.py +++ /dev/null @@ -1,466 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
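These table specs are output-only and appear on BigQuery-backed entries returned by the catalog; a date-sharded group entry carries a BigQueryDateShardedSpec. A reading sketch, assuming lookup_entry resolves a hypothetical BigQuery table:

from google.cloud import datacatalog_v1

client = datacatalog_v1.DataCatalogClient()

entry = client.lookup_entry(
    request=datacatalog_v1.LookupEntryRequest(
        linked_resource="//bigquery.googleapis.com/projects/my-project/datasets/d/tables/t",
    )
)

spec = entry.bigquery_table_spec
if spec.table_source_type == datacatalog_v1.TableSourceType.BIGQUERY_VIEW:
    print(spec.view_spec.view_query)
elif entry.bigquery_date_sharded_spec.shard_count:
    sharded = entry.bigquery_date_sharded_spec
    print(sharded.table_prefix, sharded.shard_count, sharded.latest_shard_resource)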
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'FieldType', - }, -) - - -class Tag(proto.Message): - r"""Tags contain custom metadata and are attached to Data Catalog - resources. Tags conform with the specification of their tag - template. - - See `Data Catalog - IAM `__ for - information on the permissions needed to create or view tags. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the tag in URL format - where tag ID is a system-generated identifier. - - Note: The tag itself might not be stored in the - location specified in its name. - template (str): - Required. The resource name of the tag template this tag - uses. Example: - - ``projects/{PROJECT_ID}/locations/{LOCATION}/tagTemplates/{TAG_TEMPLATE_ID}`` - - This field cannot be modified after creation. - template_display_name (str): - Output only. The display name of the tag - template. - column (str): - Resources like entry can have schemas associated with them. - This scope allows you to attach tags to an individual column - based on that schema. - - To attach a tag to a nested column, separate column names - with a dot (``.``). Example: ``column.nested_column``. - - This field is a member of `oneof`_ ``scope``. - fields (MutableMapping[str, google.cloud.datacatalog_v1.types.TagField]): - Required. Maps the ID of a tag field to its - value and additional information about that - field. - - Tag template defines valid field IDs. A tag - must have at least 1 field and at most 500 - fields. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - template: str = proto.Field( - proto.STRING, - number=2, - ) - template_display_name: str = proto.Field( - proto.STRING, - number=5, - ) - column: str = proto.Field( - proto.STRING, - number=4, - oneof='scope', - ) - fields: MutableMapping[str, 'TagField'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='TagField', - ) - - -class TagField(proto.Message): - r"""Contains the value and additional information on a field within a - [Tag][google.cloud.datacatalog.v1.Tag]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Output only. The display name of this field. - double_value (float): - The value of a tag field with a double type. - - This field is a member of `oneof`_ ``kind``. - string_value (str): - The value of a tag field with a string type. - - The maximum length is 2000 UTF-8 characters. - - This field is a member of `oneof`_ ``kind``. - bool_value (bool): - The value of a tag field with a boolean type. - - This field is a member of `oneof`_ ``kind``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - The value of a tag field with a timestamp - type. - - This field is a member of `oneof`_ ``kind``. - enum_value (google.cloud.datacatalog_v1.types.TagField.EnumValue): - The value of a tag field with an enum type. 
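A Tag is essentially a template reference plus a map of field IDs to typed TagField values, optionally scoped to a single column. A construction sketch with hypothetical template, column, and field IDs; the CreateTag samples further below show the surrounding request plumbing:

from google.cloud import datacatalog_v1

tag = datacatalog_v1.Tag(
    template="projects/my-project/locations/us/tagTemplates/data_governance",  # hypothetical
    column="customer.email",  # nested columns use dot notation
    fields={
        "has_pii": datacatalog_v1.TagField(bool_value=True),
        "data_owner": datacatalog_v1.TagField(string_value="governance-team"),
        "retention_days": datacatalog_v1.TagField(double_value=365.0),
    },
)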
- - This value must be one of the allowed values - listed in this enum. - - This field is a member of `oneof`_ ``kind``. - richtext_value (str): - The value of a tag field with a rich text - type. - The maximum length is 10 MiB as this value holds - HTML descriptions including encoded images. The - maximum length of the text without images is 100 - KiB. - - This field is a member of `oneof`_ ``kind``. - order (int): - Output only. The order of this field with respect to other - fields in this tag. Can be set by - [Tag][google.cloud.datacatalog.v1.TagTemplateField.order]. - - For example, a higher value can indicate a more important - field. The value can be negative. Multiple fields can have - the same order, and field orders within a tag don't have to - be sequential. - """ - - class EnumValue(proto.Message): - r"""An enum value. - - Attributes: - display_name (str): - The display name of the enum value. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='kind', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='kind', - ) - bool_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='kind', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='kind', - message=timestamp_pb2.Timestamp, - ) - enum_value: EnumValue = proto.Field( - proto.MESSAGE, - number=6, - oneof='kind', - message=EnumValue, - ) - richtext_value: str = proto.Field( - proto.STRING, - number=8, - oneof='kind', - ) - order: int = proto.Field( - proto.INT32, - number=7, - ) - - -class TagTemplate(proto.Message): - r"""A tag template defines a tag that can have one or more typed fields. - - The template is used to create tags that are attached to Google - Cloud resources. [Tag template roles] - (https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the template. For - example, see the [TagTemplate User] - (https://cloud.google.com/data-catalog/docs/how-to/template-user) - role that includes a permission to use the tag template to tag - resources. - - Attributes: - name (str): - The resource name of the tag template in URL - format. - Note: The tag template itself and its child - resources might not be stored in the location - specified in its name. - display_name (str): - Display name for this template. Defaults to an empty string. - - The name must contain only Unicode letters, numbers (0-9), - underscores (_), dashes (-), spaces ( ), and can't start or - end with spaces. The maximum length is 200 characters. - is_publicly_readable (bool): - Indicates whether tags created with this template are - public. Public tags do not require tag template access to - appear in [ListTags][google.cloud.datacatalog.v1.ListTags] - API response. - - Additionally, you can search for a public tag by value with - a simple search query in addition to using a ``tag:`` - predicate. - fields (MutableMapping[str, google.cloud.datacatalog_v1.types.TagTemplateField]): - Required. Map of tag template field IDs to the settings for - the field. This map is an exhaustive list of the allowed - fields. The map must contain at least one field and at most - 500 fields. - - The keys to this map are tag template field IDs. The IDs - have the following limitations: - - - Can contain uppercase and lowercase letters, numbers - (0-9) and underscores (_). 
- - Must be at least 1 character and at most 64 characters - long. - - Must start with a letter or underscore. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - is_publicly_readable: bool = proto.Field( - proto.BOOL, - number=5, - ) - fields: MutableMapping[str, 'TagTemplateField'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='TagTemplateField', - ) - - -class TagTemplateField(proto.Message): - r"""The template for an individual field within a tag template. - - Attributes: - name (str): - Output only. The resource name of the tag template field in - URL format. Example: - - ``projects/{PROJECT_ID}/locations/{LOCATION}/tagTemplates/{TAG_TEMPLATE}/fields/{FIELD}`` - - Note: The tag template field itself might not be stored in - the location specified in its name. - - The name must contain only letters (a-z, A-Z), numbers - (0-9), or underscores (_), and must start with a letter or - underscore. The maximum length is 64 characters. - display_name (str): - The display name for this field. Defaults to an empty - string. - - The name must contain only Unicode letters, numbers (0-9), - underscores (_), dashes (-), spaces ( ), and can't start or - end with spaces. The maximum length is 200 characters. - type_ (google.cloud.datacatalog_v1.types.FieldType): - Required. The type of value this tag field - can contain. - is_required (bool): - If true, this field is required. Defaults to - false. - description (str): - The description for this field. Defaults to - an empty string. - order (int): - The order of this field with respect to other - fields in this tag template. - - For example, a higher value can indicate a more - important field. The value can be negative. - Multiple fields can have the same order and - field orders within a tag don't have to be - sequential. - """ - - name: str = proto.Field( - proto.STRING, - number=6, - ) - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'FieldType' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldType', - ) - is_required: bool = proto.Field( - proto.BOOL, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - order: int = proto.Field( - proto.INT32, - number=5, - ) - - -class FieldType(proto.Message): - r""" - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - primitive_type (google.cloud.datacatalog_v1.types.FieldType.PrimitiveType): - Primitive types, such as string, boolean, - etc. - - This field is a member of `oneof`_ ``type_decl``. - enum_type (google.cloud.datacatalog_v1.types.FieldType.EnumType): - An enum type. - - This field is a member of `oneof`_ ``type_decl``. - """ - class PrimitiveType(proto.Enum): - r""" - - Values: - PRIMITIVE_TYPE_UNSPECIFIED (0): - The default invalid value for a type. - DOUBLE (1): - A double precision number. - STRING (2): - An UTF-8 string. - BOOL (3): - A boolean value. - TIMESTAMP (4): - A timestamp. - RICHTEXT (5): - A Richtext description. 
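Putting the template-side messages together: a TagTemplate maps field IDs to TagTemplateField entries, each carrying a FieldType that is either a primitive type or an enum. A construction sketch with hypothetical IDs and display names:

from google.cloud import datacatalog_v1

template = datacatalog_v1.TagTemplate(
    display_name="Data governance",
    fields={
        "has_pii": datacatalog_v1.TagTemplateField(
            display_name="Contains PII",
            is_required=True,
            type_=datacatalog_v1.FieldType(
                primitive_type=datacatalog_v1.FieldType.PrimitiveType.BOOL,
            ),
        ),
        "sensitivity": datacatalog_v1.TagTemplateField(
            display_name="Sensitivity",
            type_=datacatalog_v1.FieldType(
                enum_type=datacatalog_v1.FieldType.EnumType(
                    allowed_values=[
                        datacatalog_v1.FieldType.EnumType.EnumValue(display_name="LOW"),
                        datacatalog_v1.FieldType.EnumType.EnumValue(display_name="HIGH"),
                    ],
                ),
            ),
        ),
    },
)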
- """ - PRIMITIVE_TYPE_UNSPECIFIED = 0 - DOUBLE = 1 - STRING = 2 - BOOL = 3 - TIMESTAMP = 4 - RICHTEXT = 5 - - class EnumType(proto.Message): - r""" - - Attributes: - allowed_values (MutableSequence[google.cloud.datacatalog_v1.types.FieldType.EnumType.EnumValue]): - The set of allowed values for this enum. - - This set must not be empty and can include up to 100 allowed - values. The display names of the values in this set must not - be empty and must be case-insensitively unique within this - set. - - The order of items in this set is preserved. This field can - be used to create, remove, and reorder enum values. To - rename enum values, use the - ``RenameTagTemplateFieldEnumValue`` method. - """ - - class EnumValue(proto.Message): - r""" - - Attributes: - display_name (str): - Required. The display name of the enum value. Must not be an - empty string. - - The name must contain only Unicode letters, numbers (0-9), - underscores (_), dashes (-), spaces ( ), and can't start or - end with spaces. The maximum length is 200 characters. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - - allowed_values: MutableSequence['FieldType.EnumType.EnumValue'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldType.EnumType.EnumValue', - ) - - primitive_type: PrimitiveType = proto.Field( - proto.ENUM, - number=1, - oneof='type_decl', - enum=PrimitiveType, - ) - enum_type: EnumType = proto.Field( - proto.MESSAGE, - number=2, - oneof='type_decl', - message=EnumType, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py deleted file mode 100644 index faa4bc694cb8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/timestamps.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'SystemTimestamps', - }, -) - - -class SystemTimestamps(proto.Message): - r"""Timestamps associated with this resource in a particular - system. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Creation timestamp of the resource within the - given system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp of the last modification of the - resource or its metadata within a given system. - - Note: Depending on the source system, not every - modification updates this timestamp. - For example, BigQuery timestamps every metadata - modification but not data or permission changes. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
Expiration timestamp of the - resource within the given system. - Currently only applicable to BigQuery resources. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py b/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py deleted file mode 100644 index de40855f62db..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/google/cloud/datacatalog_v1/types/usage.py +++ /dev/null @@ -1,156 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1', - manifest={ - 'UsageStats', - 'CommonUsageStats', - 'UsageSignal', - }, -) - - -class UsageStats(proto.Message): - r"""Detailed statistics on the entry's usage. - - Usage statistics have the following limitations: - - - Only BigQuery tables have them. - - They only include BigQuery query jobs. - - They might be underestimated because wildcard table references - are not yet counted. For more information, see [Querying multiple - tables using a wildcard table] - (https://cloud.google.com/bigquery/docs/querying-wildcard-tables) - - Attributes: - total_completions (float): - The number of successful uses of the - underlying entry. - total_failures (float): - The number of failed attempts to use the - underlying entry. - total_cancellations (float): - The number of cancelled attempts to use the - underlying entry. - total_execution_time_for_completions_millis (float): - Total time spent only on successful uses, in - milliseconds. - """ - - total_completions: float = proto.Field( - proto.FLOAT, - number=1, - ) - total_failures: float = proto.Field( - proto.FLOAT, - number=2, - ) - total_cancellations: float = proto.Field( - proto.FLOAT, - number=3, - ) - total_execution_time_for_completions_millis: float = proto.Field( - proto.FLOAT, - number=4, - ) - - -class CommonUsageStats(proto.Message): - r"""Common statistics on the entry's usage. - - They can be set on any system. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - view_count (int): - View count in source system. - - This field is a member of `oneof`_ ``_view_count``. 
- """ - - view_count: int = proto.Field( - proto.INT64, - number=1, - optional=True, - ) - - -class UsageSignal(proto.Message): - r"""The set of all usage signals that Data Catalog stores. - - Note: Usually, these signals are updated daily. In rare cases, - an update may fail but will be performed again on the next day. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - The end timestamp of the duration of usage - statistics. - usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1.types.UsageStats]): - Output only. BigQuery usage statistics over each of the - predefined time ranges. - - Supported time ranges are ``{"24H", "7D", "30D"}``. - common_usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1.types.CommonUsageStats]): - Common usage statistics over each of the predefined time - ranges. - - Supported time ranges are - ``{"24H", "7D", "30D", "Lifetime"}``. - favorite_count (int): - Favorite count in the source system. - - This field is a member of `oneof`_ ``_favorite_count``. - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - usage_within_time_range: MutableMapping[str, 'UsageStats'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message='UsageStats', - ) - common_usage_within_time_range: MutableMapping[str, 'CommonUsageStats'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='CommonUsageStats', - ) - favorite_count: int = proto.Field( - proto.INT64, - number=4, - optional=True, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini b/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py b/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py deleted file mode 100644 index 7c6730232962..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datacatalog_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. 
Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py deleted file mode 100644 index 54c6f72a5136..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py deleted file mode 100644 index d057ba61b2e9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = await client.create_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py deleted file mode 100644 index 2ed262520b99..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_group_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = client.create_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py deleted file mode 100644 index 16317907ef44..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py deleted file mode 100644 index 9200460d1744..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = await client.create_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py deleted file mode 100644 index fca175bcfea8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = client.create_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py deleted file mode 100644 index 386f19d98a39..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
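The CreateTag samples only set the template and column references. For orientation (a sketch, not generated code), attaching a tag usually also means filling in the template's fields; the entry name, template name, and field IDs below are hypothetical and must match an existing template.

from google.cloud import datacatalog_v1


def tag_entry(entry_name: str, template_name: str):
    """Sketch: attach a tag with populated fields to an existing entry."""
    client = datacatalog_v1.DataCatalogClient()

    tag = datacatalog_v1.Tag()
    tag.template = template_name

    # Field IDs must match fields defined on the tag template.
    tag.fields["source"] = datacatalog_v1.TagField()
    tag.fields["source"].string_value = "ETL pipeline"
    tag.fields["has_pii"] = datacatalog_v1.TagField()
    tag.fields["has_pii"].bool_value = False

    created = client.create_tag(parent=entry_name, tag=tag)
    print(f"Created tag: {created.name}")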
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = await client.create_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py deleted file mode 100644 index dbec9a803904..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.create_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py deleted file mode 100644 index f91d58e30ec2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.create_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py deleted file mode 100644 index 78c8204e54f0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_tag_template_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = client.create_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py deleted file mode 100644 index ee89eb7798c8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
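Both CreateTagTemplate samples send an otherwise empty template. A hedged sketch (not part of the generated patch) of a more realistic request that defines typed fields up front; the project, location, template ID, and field IDs are placeholders.

from google.cloud import datacatalog_v1


def create_tag_template(project_id: str = "my-project", location: str = "us-central1"):
    """Sketch: create a tag template with a couple of typed fields."""
    client = datacatalog_v1.DataCatalogClient()

    template = datacatalog_v1.TagTemplate()
    template.display_name = "Demo tag template"

    template.fields["source"] = datacatalog_v1.TagTemplateField()
    template.fields["source"].display_name = "Source of data asset"
    template.fields["source"].type_.primitive_type = (
        datacatalog_v1.FieldType.PrimitiveType.STRING
    )

    template.fields["has_pii"] = datacatalog_v1.TagTemplateField()
    template.fields["has_pii"].display_name = "Contains PII"
    template.fields["has_pii"].type_.primitive_type = (
        datacatalog_v1.FieldType.PrimitiveType.BOOL
    )

    response = client.create_tag_template(
        parent=f"projects/{project_id}/locations/{location}",
        tag_template_id="demo_template",  # hypothetical ID
        tag_template=template,
    )
    print(f"Created template: {response.name}")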
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py deleted file mode 100644 index df2cfd22436e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry_group(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py deleted file mode 100644 index 9f11c663f32e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - client.delete_entry_group(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py deleted file mode 100644 index 5f7e0f63e554..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_entry_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - client.delete_entry(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py deleted file mode 100644 index 4cd043e360cd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
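The delete samples above call the API with bare placeholder names and no error handling. A brief illustrative sketch, not generated code, of how callers commonly wrap these deletes so that already-deleted resources and missing permissions are handled explicitly; the resource name is a placeholder.

from google.api_core import exceptions
from google.cloud import datacatalog_v1


def delete_entry_if_exists(entry_name: str):
    """Sketch: delete an entry, tolerating the case where it is already gone."""
    client = datacatalog_v1.DataCatalogClient()
    try:
        client.delete_entry(name=entry_name)
        print(f"Deleted {entry_name}")
    except exceptions.NotFound:
        # The entry was already deleted (or never existed); treat as success.
        print(f"{entry_name} not found, nothing to delete")
    except exceptions.PermissionDenied as err:
        # Surface a missing delete permission clearly instead of swallowing it.
        raise RuntimeError(f"Missing permission to delete {entry_name}") from err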
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_tag(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py deleted file mode 100644 index 34192ac63908..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - client.delete_tag(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py deleted file mode 100644 index 19cb502cd241..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py deleted file mode 100644 index 5c7c981e1594..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template_field(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py deleted file mode 100644 index 163cc051fc6b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template_field(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py deleted file mode 100644 index adf699293348..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template(request=request) - - -# [END datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py deleted file mode 100644 index 302dd4280271..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
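The DeleteTagTemplate samples pass force=True, which also removes any tags created from the template. The short sketch below, not part of the generated patch, just makes that cleanup behavior explicit; the template name is a placeholder.

from google.cloud import datacatalog_v1


def delete_template(template_name: str):
    """Sketch: remove a tag template and, with it, every tag based on it."""
    client = datacatalog_v1.DataCatalogClient()

    # force=True deletes the template together with all tags that use it;
    # the operation cannot be undone.
    client.delete_tag_template(name=template_name, force=True)
    print(f"Deleted tag template {template_name}")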
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_get_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py deleted file mode 100644 index b56aafa11e72..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_get_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py deleted file mode 100644 index 50314eef3272..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_group_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_get_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py deleted file mode 100644 index 8e48e4aff6a7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_get_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py deleted file mode 100644 index 88a0c41f6a12..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
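GetEntry requires the Data Catalog resource name. When only the underlying resource is known, the DataCatalog LookupEntry RPC resolves it instead; a hedged sketch follows, using a hypothetical BigQuery table as the linked resource.

from google.cloud import datacatalog_v1


def lookup_bigquery_entry(project_id: str, dataset_id: str, table_id: str):
    """Sketch: resolve a Data Catalog entry from the resource it describes."""
    client = datacatalog_v1.DataCatalogClient()

    request = datacatalog_v1.LookupEntryRequest(
        linked_resource=(
            f"//bigquery.googleapis.com/projects/{project_id}"
            f"/datasets/{dataset_id}/tables/{table_id}"
        )
    )
    entry = client.lookup_entry(request=request)
    print(f"Resolved entry: {entry.name}")
    return entry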
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py deleted file mode 100644 index 4d74494edaae..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py deleted file mode 100644 index 690f210fb2c8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
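The IAM samples fetch a policy and stop there. As an illustrative sketch only (not generated code), the usual read-modify-write cycle adds a binding locally before writing the policy back; the resource name, role, and member below are placeholders.

from google.cloud import datacatalog_v1
from google.iam.v1 import iam_policy_pb2  # type: ignore


def grant_viewer(resource: str, member: str = "user:analyst@example.com"):
    """Sketch: read-modify-write of an IAM policy on a Data Catalog resource."""
    client = datacatalog_v1.DataCatalogClient()

    # 1. Read the current policy.
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )

    # 2. Add a binding locally (role name is a placeholder).
    policy.bindings.add(role="roles/datacatalog.viewer", members=[member])

    # 3. Write the modified policy back.
    updated = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )
    print(f"Policy now has {len(updated.bindings)} binding(s)")
    return updated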
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_get_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py deleted file mode 100644 index 13727551bbaf..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_get_tag_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_get_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py deleted file mode 100644 index 2b6bf08da25f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ImportEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_import_entries(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ImportEntriesRequest( - gcs_bucket_path="gcs_bucket_path_value", - parent="parent_value", - ) - - # Make the request - operation = client.import_entries(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ImportEntries_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py deleted file mode 100644 index bc77b4ce6cff..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_import_entries_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ImportEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_import_entries(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ImportEntriesRequest( - gcs_bucket_path="gcs_bucket_path_value", - parent="parent_value", - ) - - # Make the request - operation = client.import_entries(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ImportEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py deleted file mode 100644 index c9b1a9b43b12..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_list_entries(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListEntries_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py deleted file mode 100644 index d035990417ec..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entries_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_list_entries(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py deleted file mode 100644 index c6eb9896d1b2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListEntryGroups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListEntryGroups_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py deleted file mode 100644 index 9332af6b72f8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py deleted file mode 100644 index ced100b01580..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListTags_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_list_tags(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py deleted file mode 100644 index 972b8001db6a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_list_tags_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ListTags_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_list_tags(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ListTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py deleted file mode 100644 index 7367122315bd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_LookupEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_lookup_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py deleted file mode 100644 index e63535f74323..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_lookup_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_LookupEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_lookup_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py deleted file mode 100644 index e90b18d8ec9a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyEntryContacts -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_modify_entry_contacts(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryContactsRequest( - name="name_value", - ) - - # Make the request - response = await client.modify_entry_contacts(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py deleted file mode 100644 index 3c62b038b46c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyEntryContacts -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_modify_entry_contacts(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryContactsRequest( - name="name_value", - ) - - # Make the request - response = client.modify_entry_contacts(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py deleted file mode 100644 index bddcd6e48ece..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyEntryOverview -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_modify_entry_overview(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryOverviewRequest( - name="name_value", - ) - - # Make the request - response = await client.modify_entry_overview(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py deleted file mode 100644 index cc50d86d6d43..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyEntryOverview -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_modify_entry_overview(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ModifyEntryOverviewRequest( - name="name_value", - ) - - # Make the request - response = client.modify_entry_overview(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py deleted file mode 100644 index d03e830e5eeb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReconcileTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ReconcileTags_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_reconcile_tags(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ReconcileTagsRequest( - parent="parent_value", - tag_template="tag_template_value", - ) - - # Make the request - operation = client.reconcile_tags(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ReconcileTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py deleted file mode 100644 index db7d90ef4d74..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReconcileTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_ReconcileTags_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_reconcile_tags(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.ReconcileTagsRequest( - parent="parent_value", - tag_template="tag_template_value", - ) - - # Make the request - operation = client.reconcile_tags(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_ReconcileTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py deleted file mode 100644 index becfd2d292b9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = await client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py deleted file mode 100644 index 2f2bd73d10ce..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateFieldEnumValue -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = await client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py deleted file mode 100644 index 719a36507137..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateFieldEnumValue -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py deleted file mode 100644 index 722704f3f2b2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py deleted file mode 100644 index 62bb1d15bb0c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_SearchCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_search_catalog(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_SearchCatalog_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py deleted file mode 100644 index 9e2673c05d52..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_search_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_SearchCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_search_catalog(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_DataCatalog_SearchCatalog_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py deleted file mode 100644 index d9654f7affa6..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py deleted file mode 100644 index 5dc66b715318..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py deleted file mode 100644 index 2370cfc3e188..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StarEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_StarEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_star_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.StarEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.star_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_StarEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py deleted file mode 100644 index 5bee56cb6e74..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_star_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StarEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_StarEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_star_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.StarEntryRequest( - name="name_value", - ) - - # Make the request - response = client.star_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_StarEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py deleted file mode 100644 index 76be1c9b0ba7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py deleted file mode 100644 index 4d2bb42106f3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py deleted file mode 100644 index 6dd5a2120ba5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UnstarEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UnstarEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_unstar_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UnstarEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.unstar_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UnstarEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py deleted file mode 100644 index d1712e6863db..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_unstar_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UnstarEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UnstarEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_unstar_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UnstarEntryRequest( - name="name_value", - ) - - # Make the request - response = client.unstar_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UnstarEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py deleted file mode 100644 index edfcd1577e86..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_entry(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py deleted file mode 100644 index 5d3b8dd18792..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateEntryGroupRequest( - ) - - # Make the request - response = await client.update_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py deleted file mode 100644 index f6ea137ae8bc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_group_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_entry_group(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateEntryGroupRequest( - ) - - # Make the request - response = client.update_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py deleted file mode 100644 index ae8cedd82586..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_entry(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1.Entry() - entry.type_ = "LOOK" - entry.integrated_system = "VERTEX_AI" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py deleted file mode 100644 index 7ea7f33ebeb5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_tag(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = await client.update_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py deleted file mode 100644 index 71cca56b1f26..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_tag(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = client.update_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py deleted file mode 100644 index 1ab7aaacdfc6..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTagTemplateRequest( - ) - - # Make the request - response = await client.update_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py deleted file mode 100644 index 3da6486ccde2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.update_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py deleted file mode 100644 index fc4321e3f4ff..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1.TagTemplateField() - tag_template_field.type_.primitive_type = "RICHTEXT" - - request = datacatalog_v1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.update_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py deleted file mode 100644 index 47b91a536181..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_tag_template_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_tag_template(): - # Create a client - client = datacatalog_v1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTagTemplateRequest( - ) - - # Make the request - response = client.update_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py deleted file mode 100644 index c1b831646046..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py deleted file mode 100644 index 6dba3cab9627..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py deleted file mode 100644 index 5b4ea405a91a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py deleted file mode 100644 index 8eeab4707753..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py deleted file mode 100644 index 7da5d688f254..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_policy_tag(request=request) - - -# [END datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py deleted file mode 100644 index 6b6ef2e28a3b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - client.delete_policy_tag(request=request) - - -# [END datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py deleted file mode 100644 index e98b96756c6d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - await client.delete_taxonomy(request=request) - - -# [END datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py deleted file mode 100644 index d96da5017b32..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - client.delete_taxonomy(request=request) - - -# [END datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py deleted file mode 100644 index 1116ba587bae..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py deleted file mode 100644 index 2c81ed5468ed..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py deleted file mode 100644 index 55fbf69a7c1f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = await client.get_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py deleted file mode 100644 index c3c9fd284b3b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = client.get_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py deleted file mode 100644 index 862f3c819340..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py deleted file mode 100644 index 3861a49aab88..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = client.get_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py deleted file mode 100644 index c8237c15a626..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPolicyTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py deleted file mode 100644 index 00cd03844f48..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPolicyTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py deleted file mode 100644 index f497579b1b1c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py deleted file mode 100644 index 11263b6d90c8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py deleted file mode 100644 index 8bbdbc179490..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = await client.export_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py deleted file mode 100644 index 1f30c159f594..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - request = datacatalog_v1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = client.export_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py deleted file mode 100644 index 2afe477870f2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = await client.import_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py deleted file mode 100644 index 0f6462dbca8a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = client.import_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py deleted file mode 100644 index c1a6c68403b8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReplaceTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_replace_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() - serialized_taxonomy.display_name = "display_name_value" - - request = datacatalog_v1.ReplaceTaxonomyRequest( - name="name_value", - serialized_taxonomy=serialized_taxonomy, - ) - - # Make the request - response = await client.replace_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py deleted file mode 100644 index 947acd23c470..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReplaceTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_replace_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - serialized_taxonomy = datacatalog_v1.SerializedTaxonomy() - serialized_taxonomy.display_name = "display_name_value" - - request = datacatalog_v1.ReplaceTaxonomyRequest( - name="name_value", - serialized_taxonomy=serialized_taxonomy, - ) - - # Make the request - response = client.replace_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py deleted file mode 100644 index 2bf7b337ab49..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py deleted file mode 100644 index 6703fdd8ef3a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py deleted file mode 100644 index 7b1962ac8ca7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py deleted file mode 100644 index b0ed75bd1818..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py deleted file mode 100644 index 8c6afcd5a236..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdatePolicyTagRequest( - ) - - # Make the request - response = await client.update_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py deleted file mode 100644 index e7ccc2ce0409..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdatePolicyTagRequest( - ) - - # Make the request - response = client.update_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py deleted file mode 100644 index 8f2e122eaaaa..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -async def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTaxonomyRequest( - ) - - # Make the request - response = await client.update_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py deleted file mode 100644 index 0d88c2b173d5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1 - - -def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1.UpdateTaxonomyRequest( - ) - - # Make the request - response = client.update_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json deleted file mode 100644 index 1659a652e62c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ /dev/null @@ -1,8111 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.datacatalog.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-datacatalog", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1.types.EntryGroup" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_create_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntryGroup_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - 
"shortName": "DataCatalog" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1.types.EntryGroup" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_create_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntryGroup_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1.types.Entry" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "datacatalog_v1_generated_data_catalog_create_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntry_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateEntry", - "service": { - "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1.types.Entry" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "datacatalog_v1_generated_data_catalog_create_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateEntry_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_field_id", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1.types.TagTemplateField" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "create_tag_template_field" - }, - "description": "Sample for CreateTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": 
"google.cloud.datacatalog_v1.DataCatalogClient.create_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateFieldRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_field_id", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1.types.TagTemplateField" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "create_tag_template_field" - }, - "description": "Sample for CreateTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplateField_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_id", - "type": "str" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1.types.TagTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "create_tag_template" - }, - "description": "Sample for CreateTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_create_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplate_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datacatalog_v1_generated_data_catalog_create_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_id", - "type": "str" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1.types.TagTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "create_tag_template" - }, - "description": "Sample for CreateTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_create_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTagTemplate_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.create_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1.types.Tag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Tag", - "shortName": "create_tag" - }, - "description": "Sample for CreateTag", - "file": "datacatalog_v1_generated_data_catalog_create_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTag_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "datacatalog_v1_generated_data_catalog_create_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.create_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.CreateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1.types.Tag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Tag", - "shortName": "create_tag" - }, - "description": "Sample for CreateTag", - "file": "datacatalog_v1_generated_data_catalog_create_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_CreateTag_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_create_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_delete_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": 
"google.cloud.datacatalog_v1.DataCatalogClient.delete_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntryGroup_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "datacatalog_v1_generated_data_catalog_delete_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntry_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteEntryRequest" - }, 
- { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "datacatalog_v1_generated_data_catalog_delete_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteEntry_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template_field" - }, - "description": "Sample for DeleteTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template_field" - }, - "description": "Sample for DeleteTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplateField_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template" - }, - "description": "Sample for DeleteTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template" - }, - "description": "Sample for DeleteTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTagTemplate_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.delete_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag" - }, - "description": "Sample for DeleteTag", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTag_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.delete_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.DeleteTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag" - }, - "description": "Sample for DeleteTag", - "file": "datacatalog_v1_generated_data_catalog_delete_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_DeleteTag_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_delete_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "read_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_get_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntryGroup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "read_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_get_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntryGroup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datacatalog_v1_generated_data_catalog_get_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "datacatalog_v1_generated_data_catalog_get_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "datacatalog_v1_generated_data_catalog_get_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": 
"google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1_generated_data_catalog_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.get_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - 
"shortName": "GetTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "get_tag_template" - }, - "description": "Sample for GetTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_get_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetTagTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.get_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.GetTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "get_tag_template" - }, - "description": "Sample for GetTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_get_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_GetTagTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_get_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.import_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ImportEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ImportEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ImportEntriesRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_entries" - }, - "description": "Sample for ImportEntries", - "file": "datacatalog_v1_generated_data_catalog_import_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ImportEntries_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_import_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.import_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ImportEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ImportEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ImportEntriesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_entries" - }, - "description": "Sample for ImportEntries", - "file": "datacatalog_v1_generated_data_catalog_import_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ImportEntries_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_import_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesAsyncPager", - "shortName": "list_entries" 
- }, - "description": "Sample for ListEntries", - "file": "datacatalog_v1_generated_data_catalog_list_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntriesPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "datacatalog_v1_generated_data_catalog_list_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntries_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_entry_groups", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsAsyncPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "datacatalog_v1_generated_data_catalog_list_entry_groups_async.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntryGroups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_entry_groups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_entry_groups", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListEntryGroups", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListEntryGroupsPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListEntryGroups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_entry_groups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.list_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsAsyncPager", - "shortName": "list_tags" - }, - "description": "Sample for ListTags", - "file": "datacatalog_v1_generated_data_catalog_list_tags_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListTags_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - 
"end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_tags_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.list_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ListTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.ListTagsPager", - "shortName": "list_tags" - }, - "description": "Sample for ListTags", - "file": "datacatalog_v1_generated_data_catalog_list_tags_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ListTags_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_list_tags_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.lookup_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.LookupEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "datacatalog_v1_generated_data_catalog_lookup_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_LookupEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_lookup_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.lookup_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.LookupEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "datacatalog_v1_generated_data_catalog_lookup_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_LookupEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_lookup_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.modify_entry_contacts", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ModifyEntryContacts" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Contacts", - "shortName": "modify_entry_contacts" - }, - "description": "Sample for ModifyEntryContacts", - "file": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - 
"fullName": "google.cloud.datacatalog_v1.DataCatalogClient.modify_entry_contacts", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryContacts", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ModifyEntryContacts" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ModifyEntryContactsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Contacts", - "shortName": "modify_entry_contacts" - }, - "description": "Sample for ModifyEntryContacts", - "file": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryContacts_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_modify_entry_contacts_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.modify_entry_overview", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ModifyEntryOverview" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryOverview", - "shortName": "modify_entry_overview" - }, - "description": "Sample for ModifyEntryOverview", - "file": "datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_modify_entry_overview_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.modify_entry_overview", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ModifyEntryOverview", - "service": { - 
"fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ModifyEntryOverview" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ModifyEntryOverviewRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryOverview", - "shortName": "modify_entry_overview" - }, - "description": "Sample for ModifyEntryOverview", - "file": "datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ModifyEntryOverview_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_modify_entry_overview_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.reconcile_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ReconcileTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ReconcileTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ReconcileTagsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reconcile_tags" - }, - "description": "Sample for ReconcileTags", - "file": "datacatalog_v1_generated_data_catalog_reconcile_tags_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ReconcileTags_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_reconcile_tags_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.reconcile_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.ReconcileTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ReconcileTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ReconcileTagsRequest" - }, - { - "name": "retry", 
- "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "reconcile_tags" - }, - "description": "Sample for ReconcileTags", - "file": "datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_ReconcileTags_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_reconcile_tags_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.rename_tag_template_field_enum_value", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateFieldEnumValue" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_enum_value_display_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "rename_tag_template_field_enum_value" - }, - "description": "Sample for RenameTagTemplateFieldEnumValue", - "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.rename_tag_template_field_enum_value", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateFieldEnumValue" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.datacatalog_v1.types.RenameTagTemplateFieldEnumValueRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_enum_value_display_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "rename_tag_template_field_enum_value" - }, - "description": "Sample for RenameTagTemplateFieldEnumValue", - "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.rename_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_tag_template_field_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "rename_tag_template_field" - }, - "description": "Sample for RenameTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.rename_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateField", - "service": { - "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.RenameTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_tag_template_field_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "rename_tag_template_field" - }, - "description": "Sample for RenameTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_RenameTagTemplateField_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.search_catalog", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.SearchCatalog", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SearchCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest" - }, - { - "name": "scope", - "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogAsyncPager", - "shortName": "search_catalog" - }, - "description": "Sample for SearchCatalog", - "file": "datacatalog_v1_generated_data_catalog_search_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_SearchCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_search_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.search_catalog", - "method": { - "fullName": 
"google.cloud.datacatalog.v1.DataCatalog.SearchCatalog", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SearchCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest" - }, - { - "name": "scope", - "type": "google.cloud.datacatalog_v1.types.SearchCatalogRequest.Scope" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.data_catalog.pagers.SearchCatalogPager", - "shortName": "search_catalog" - }, - "description": "Sample for SearchCatalog", - "file": "datacatalog_v1_generated_data_catalog_search_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_SearchCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_search_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1_generated_data_catalog_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetIamPolicy", - "service": { - "fullName": 
"google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.star_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.StarEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "StarEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.StarEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.StarEntryResponse", - "shortName": "star_entry" - }, - "description": "Sample for StarEntry", - "file": "datacatalog_v1_generated_data_catalog_star_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_StarEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_star_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.star_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.StarEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "StarEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.StarEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.StarEntryResponse", - "shortName": "star_entry" - }, - "description": "Sample for StarEntry", - "file": "datacatalog_v1_generated_data_catalog_star_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_StarEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_star_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - 
"shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.unstar_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UnstarEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UnstarEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UnstarEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.UnstarEntryResponse", - "shortName": "unstar_entry" - }, - "description": "Sample for UnstarEntry", - "file": "datacatalog_v1_generated_data_catalog_unstar_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UnstarEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_unstar_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.unstar_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UnstarEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UnstarEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UnstarEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.UnstarEntryResponse", - "shortName": "unstar_entry" - }, - "description": "Sample for UnstarEntry", - "file": "datacatalog_v1_generated_data_catalog_unstar_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "datacatalog_v1_generated_DataCatalog_UnstarEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_unstar_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "datacatalog_v1_generated_data_catalog_update_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.EntryGroup", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": 
"datacatalog_v1_generated_data_catalog_update_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntryGroup_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "datacatalog_v1_generated_data_catalog_update_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntry_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": 
"datacatalog_v1_generated_data_catalog_update_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateEntry_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1.types.TagTemplateField" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "update_tag_template_field" - }, - "description": "Sample for UpdateTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1.types.TagTemplateField" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" 
- }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplateField", - "shortName": "update_tag_template_field" - }, - "description": "Sample for UpdateTagTemplateField", - "file": "datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplateField_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1.types.TagTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "update_tag_template" - }, - "description": "Sample for UpdateTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_update_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagTemplateRequest" - }, - { - "name": "tag_template", - "type": 
"google.cloud.datacatalog_v1.types.TagTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.TagTemplate", - "shortName": "update_tag_template" - }, - "description": "Sample for UpdateTagTemplate", - "file": "datacatalog_v1_generated_data_catalog_update_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTagTemplate_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.update_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagRequest" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1.types.Tag" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Tag", - "shortName": "update_tag" - }, - "description": "Sample for UpdateTag", - "file": "datacatalog_v1_generated_data_catalog_update_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTag_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.update_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog.UpdateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTagRequest" - }, - { - "name": "tag", - "type": 
"google.cloud.datacatalog_v1.types.Tag" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Tag", - "shortName": "update_tag" - }, - "description": "Sample for UpdateTag", - "file": "datacatalog_v1_generated_data_catalog_update_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_DataCatalog_UpdateTag_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_data_catalog_update_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", - "shortName": "PolicyTagManagerSerializationAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.export_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ExportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse", - "shortName": "export_taxonomies" - }, - "description": "Sample for ExportTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", - "shortName": "PolicyTagManagerSerializationClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.export_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ExportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ExportTaxonomies" - }, 
- "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ExportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.ExportTaxonomiesResponse", - "shortName": "export_taxonomies" - }, - "description": "Sample for ExportTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", - "shortName": "PolicyTagManagerSerializationAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.import_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ImportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse", - "shortName": "import_taxonomies" - }, - "description": "Sample for ImportTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", - "shortName": "PolicyTagManagerSerializationClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.import_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ImportTaxonomies", - "service": { - "fullName": 
"google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ImportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ImportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.ImportTaxonomiesResponse", - "shortName": "import_taxonomies" - }, - "description": "Sample for ImportTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient", - "shortName": "PolicyTagManagerSerializationAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationAsyncClient.replace_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ReplaceTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "replace_taxonomy" - }, - "description": "Sample for ReplaceTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient", - "shortName": "PolicyTagManagerSerializationClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerSerializationClient.replace_taxonomy", - "method": { - "fullName": 
"google.cloud.datacatalog.v1.PolicyTagManagerSerialization.ReplaceTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ReplaceTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ReplaceTaxonomyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "replace_taxonomy" - }, - "description": "Sample for ReplaceTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManagerSerialization_ReplaceTaxonomy_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_serialization_replace_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.create_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreatePolicyTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "create_policy_tag" - }, - "description": "Sample for CreatePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": 
"google.cloud.datacatalog_v1.PolicyTagManagerClient.create_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreatePolicyTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "create_policy_tag" - }, - "description": "Sample for CreatePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreatePolicyTag_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_create_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.create_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "create_taxonomy" - }, - "description": "Sample for CreateTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.create_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.CreateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.CreateTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "create_taxonomy" - }, - "description": "Sample for CreateTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_CreateTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_create_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.delete_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeletePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeletePolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_policy_tag" - }, - "description": "Sample for DeletePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - 
"fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.delete_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeletePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeletePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeletePolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_policy_tag" - }, - "description": "Sample for DeletePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeletePolicyTag_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_delete_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.delete_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeleteTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_taxonomy" - }, - "description": "Sample for DeleteTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.delete_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.DeleteTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - 
}, - "shortName": "DeleteTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.DeleteTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_taxonomy" - }, - "description": "Sample for DeleteTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_DeleteTaxonomy_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_delete_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetPolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetPolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "get_policy_tag" - }, - "description": "Sample for GetPolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetPolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetPolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetPolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "get_policy_tag" - }, 
- "description": "Sample for GetPolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetPolicyTag_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.get_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "get_taxonomy" - }, - "description": "Sample for GetTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.get_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.GetTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.GetTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "get_taxonomy" - }, - "description": "Sample for GetTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_GetTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_get_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.list_policy_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListPolicyTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListPolicyTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager", - "shortName": "list_policy_tags" - }, - "description": "Sample for ListPolicyTags", - "file": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.list_policy_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListPolicyTags", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListPolicyTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListPolicyTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListPolicyTagsPager", - "shortName": "list_policy_tags" - }, - "description": "Sample for ListPolicyTags", - "file": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "datacatalog_v1_generated_PolicyTagManager_ListPolicyTags_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_list_policy_tags_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.list_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager", - "shortName": "list_taxonomies" - }, - "description": "Sample for ListTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.list_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.ListTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.ListTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.services.policy_tag_manager.pagers.ListTaxonomiesPager", - "shortName": "list_taxonomies" - }, - "description": "Sample for ListTaxonomies", - "file": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datacatalog_v1_generated_PolicyTagManager_ListTaxonomies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_list_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { 
- "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.update_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "update_policy_tag" - }, - "description": "Sample for UpdatePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.update_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdatePolicyTagRequest" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.PolicyTag", - "shortName": "update_policy_tag" - }, - "description": "Sample for UpdatePolicyTag", - "file": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdatePolicyTag_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_update_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerAsyncClient.update_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "update_taxonomy" - }, - "description": "Sample for UpdateTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1.PolicyTagManagerClient.update_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager.UpdateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1.types.UpdateTaxonomyRequest" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1.types.Taxonomy", - "shortName": "update_taxonomy" - }, - "description": "Sample for UpdateTaxonomy", - "file": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1_generated_PolicyTagManager_UpdateTaxonomy_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1_generated_policy_tag_manager_update_taxonomy_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py b/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py deleted file mode 100644 index 7fe4359520fe..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/scripts/fixup_datacatalog_v1_keywords.py +++ /dev/null @@ -1,222 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class datacatalogCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), - 'create_policy_tag': ('parent', 'policy_tag', ), - 'create_tag': ('parent', 'tag', ), - 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), - 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), - 'create_taxonomy': ('parent', 'taxonomy', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'force', ), - 'delete_policy_tag': ('name', ), - 'delete_tag': ('name', ), - 'delete_tag_template': ('name', 'force', ), - 'delete_tag_template_field': ('name', 'force', ), - 'delete_taxonomy': ('name', ), - 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), - 'get_entry': ('name', ), - 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_policy_tag': ('name', ), - 'get_tag_template': ('name', ), - 'get_taxonomy': ('name', ), - 'import_entries': ('parent', 'gcs_bucket_path', 'job_id', ), - 'import_taxonomies': ('parent', 'inline_source', 'cross_regional_source', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', ), - 'list_policy_tags': ('parent', 'page_size', 'page_token', ), - 'list_tags': ('parent', 'page_size', 'page_token', ), - 'list_taxonomies': ('parent', 'page_size', 'page_token', 'filter', ), - 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', 'project', 'location', ), - 'modify_entry_contacts': ('name', 'contacts', ), - 'modify_entry_overview': ('name', 'entry_overview', ), - 'reconcile_tags': ('parent', 'tag_template', 'force_delete_missing', 'tags', ), - 
'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), - 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), - 'replace_taxonomy': ('name', 'serialized_taxonomy', ), - 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', 'admin_search', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'star_entry': ('name', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'unstar_entry': ('name', ), - 'update_entry': ('entry', 'update_mask', ), - 'update_entry_group': ('entry_group', 'update_mask', ), - 'update_policy_tag': ('policy_tag', 'update_mask', ), - 'update_tag': ('tag', 'update_mask', ), - 'update_tag_template': ('tag_template', 'update_mask', ), - 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), - 'update_taxonomy': ('taxonomy', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=datacatalogCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the datacatalog client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/setup.py b/owl-bot-staging/google-cloud-datacatalog/v1/setup.py deleted file mode 100644 index 1d59854d3bcb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-datacatalog' - - -description = "Google Cloud Datacatalog API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/datacatalog/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-datacatalog" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf99e0be..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py deleted file mode 100644 index 011518302885..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ /dev/null @@ -1,10354 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogAsyncClient -from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient -from google.cloud.datacatalog_v1.services.data_catalog import pagers -from google.cloud.datacatalog_v1.services.data_catalog import transports -from google.cloud.datacatalog_v1.types import bigquery -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import data_source -from google.cloud.datacatalog_v1.types import datacatalog -from google.cloud.datacatalog_v1.types import dataplex_spec -from google.cloud.datacatalog_v1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1.types import physical_schema -from google.cloud.datacatalog_v1.types import schema -from google.cloud.datacatalog_v1.types import search -from google.cloud.datacatalog_v1.types import table_spec -from google.cloud.datacatalog_v1.types import tags -from google.cloud.datacatalog_v1.types import timestamps -from google.cloud.datacatalog_v1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataCatalogClient._get_default_mtls_endpoint(None) is None - assert DataCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataCatalogClient, "grpc"), - (DataCatalogAsyncClient, "grpc_asyncio"), -]) -def test_data_catalog_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataCatalogGrpcTransport, "grpc"), - (transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_catalog_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataCatalogClient, "grpc"), - (DataCatalogAsyncClient, "grpc_asyncio"), -]) -def test_data_catalog_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_data_catalog_client_get_transport_class(): - transport = DataCatalogClient.get_transport_class() - available_transports = [ - transports.DataCatalogGrpcTransport, - ] - assert transport in 
available_transports - - transport = DataCatalogClient.get_transport_class("grpc") - assert transport == transports.DataCatalogGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -def test_data_catalog_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_catalog_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. 
Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataCatalogClient, DataCatalogAsyncClient -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -def test_data_catalog_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_catalog_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_catalog_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_catalog_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataCatalogClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_catalog_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.SearchCatalogRequest, - dict, -]) -def test_search_catalog(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.search_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_search_catalog_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - client.search_catalog() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - -@pytest.mark.asyncio -async def test_search_catalog_async(transport: str = 'grpc_asyncio', request_type=datacatalog.SearchCatalogRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.search_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogAsyncPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_search_catalog_async_from_dict(): - await test_search_catalog_async(request_type=dict) - - -def test_search_catalog_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_catalog( - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_search_catalog_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_catalog( - datacatalog.SearchCatalogRequest(), - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - -@pytest.mark.asyncio -async def test_search_catalog_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_catalog( - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_search_catalog_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.search_catalog( - datacatalog.SearchCatalogRequest(), - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - -def test_search_catalog_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.search_catalog(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, search.SearchCatalogResult) - for i in results) -def test_search_catalog_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_catalog(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_catalog_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_catalog(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, search.SearchCatalogResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_catalog_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_catalog(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateEntryGroupRequest, - dict, -]) -def test_create_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - client.create_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - -@pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_entry_group_async_from_dict(): - await test_create_entry_group_async(request_type=dict) - - -def test_create_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry_group( - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_group_id - mock_val = 'entry_group_id_value' - assert arg == mock_val - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - - -def test_create_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_group( - datacatalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry_group( - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_group_id - mock_val = 'entry_group_id_value' - assert arg == mock_val - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_entry_group( - datacatalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetEntryGroupRequest, - dict, -]) -def test_get_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - client.get_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - -@pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_entry_group_async_from_dict(): - await test_get_entry_group_async(request_type=dict) - - -def test_get_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_entry_group( - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].read_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_get_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_group( - datacatalog.GetEntryGroupRequest(), - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_get_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry_group( - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].read_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry_group( - datacatalog.GetEntryGroupRequest(), - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateEntryGroupRequest, - dict, -]) -def test_update_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - client.update_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - -@pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_entry_group_async_from_dict(): - await test_update_entry_group_async(request_type=dict) - - -def test_update_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_group( - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_group( - datacatalog.UpdateEntryGroupRequest(), - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_entry_group( - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry_group( - datacatalog.UpdateEntryGroupRequest(), - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteEntryGroupRequest, - dict, -]) -def test_delete_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - client.delete_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - -@pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_entry_group_async_from_dict(): - await test_delete_entry_group_async(request_type=dict) - - -def test_delete_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = None - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry_group( - datacatalog.DeleteEntryGroupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry_group( - datacatalog.DeleteEntryGroupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ListEntryGroupsRequest, - dict, -]) -def test_list_entry_groups(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_entry_groups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - client.list_entry_groups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - -@pytest.mark.asyncio -async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntryGroupsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_from_dict(): - await test_list_entry_groups_async(request_type=dict) - - -def test_list_entry_groups_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntryGroupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = datacatalog.ListEntryGroupsResponse() - client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entry_groups_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntryGroupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) - await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entry_groups_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entry_groups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entry_groups_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_groups( - datacatalog.ListEntryGroupsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entry_groups_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entry_groups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entry_groups_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entry_groups( - datacatalog.ListEntryGroupsRequest(), - parent='parent_value', - ) - - -def test_list_entry_groups_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entry_groups(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datacatalog.EntryGroup) - for i in results) -def test_list_entry_groups_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entry_groups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entry_groups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datacatalog.EntryGroup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_groups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateEntryRequest, - dict, -]) -def test_create_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - client.create_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - -@pytest.mark.asyncio -async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_entry_async_from_dict(): - await test_create_entry_async(request_type=dict) - - -def test_create_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry( - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_id - mock_val = 'entry_id_value' - assert arg == mock_val - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - - -def test_create_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry( - datacatalog.CreateEntryRequest(), - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry( - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_id - mock_val = 'entry_id_value' - assert arg == mock_val - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_entry( - datacatalog.CreateEntryRequest(), - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateEntryRequest, - dict, -]) -def test_update_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - client.update_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - -@pytest.mark.asyncio -async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_entry_async_from_dict(): - await test_update_entry_async(request_type=dict) - - -def test_update_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryRequest() - - request.entry.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryRequest() - - request.entry.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry( - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry( - datacatalog.UpdateEntryRequest(), - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_entry( - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry( - datacatalog.UpdateEntryRequest(), - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteEntryRequest, - dict, -]) -def test_delete_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - client.delete_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - -@pytest.mark.asyncio -async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_entry_async_from_dict(): - await test_delete_entry_async(request_type=dict) - - -def test_delete_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = None - client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry( - datacatalog.DeleteEntryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry( - datacatalog.DeleteEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetEntryRequest, - dict, -]) -def test_get_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - client.get_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - -@pytest.mark.asyncio -async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_entry_async_from_dict(): - await test_get_entry_async(request_type=dict) - - -def test_get_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry( - datacatalog.GetEntryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry( - datacatalog.GetEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.LookupEntryRequest, - dict, -]) -def test_lookup_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_lookup_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - client.lookup_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() - -@pytest.mark.asyncio -async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.LookupEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.lookup_entry), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - fully_qualified_name='fully_qualified_name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.lookup_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.LookupEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.fully_qualified_name == 'fully_qualified_name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_lookup_entry_async_from_dict(): - await test_lookup_entry_async(request_type=dict) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ListEntriesRequest, - dict, -]) -def test_list_entries(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntriesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - client.list_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() - -@pytest.mark.asyncio -async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntriesRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_entries_async_from_dict(): - await test_list_entries_async(request_type=dict) - - -def test_list_entries_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = datacatalog.ListEntriesResponse() - client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entries_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) - await client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entries_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entries_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entries( - datacatalog.ListEntriesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entries_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entries_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entries( - datacatalog.ListEntriesRequest(), - parent='parent_value', - ) - - -def test_list_entries_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entries(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datacatalog.Entry) - for i in results) -def test_list_entries_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entries_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datacatalog.Entry) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entries_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.ModifyEntryOverviewRequest, - dict, -]) -def test_modify_entry_overview(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_overview), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryOverview( - overview='overview_value', - ) - response = client.modify_entry_overview(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryOverviewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryOverview) - assert response.overview == 'overview_value' - - -def test_modify_entry_overview_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_overview), - '__call__') as call: - client.modify_entry_overview() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryOverviewRequest() - -@pytest.mark.asyncio -async def test_modify_entry_overview_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ModifyEntryOverviewRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_overview), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryOverview( - overview='overview_value', - )) - response = await client.modify_entry_overview(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryOverviewRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryOverview) - assert response.overview == 'overview_value' - - -@pytest.mark.asyncio -async def test_modify_entry_overview_async_from_dict(): - await test_modify_entry_overview_async(request_type=dict) - - -def test_modify_entry_overview_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ModifyEntryOverviewRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_overview), - '__call__') as call: - call.return_value = datacatalog.EntryOverview() - client.modify_entry_overview(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_modify_entry_overview_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ModifyEntryOverviewRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_overview), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryOverview()) - await client.modify_entry_overview(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ModifyEntryContactsRequest, - dict, -]) -def test_modify_entry_contacts(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_contacts), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Contacts( - ) - response = client.modify_entry_contacts(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryContactsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Contacts) - - -def test_modify_entry_contacts_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_contacts), - '__call__') as call: - client.modify_entry_contacts() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryContactsRequest() - -@pytest.mark.asyncio -async def test_modify_entry_contacts_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ModifyEntryContactsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_contacts), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Contacts( - )) - response = await client.modify_entry_contacts(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ModifyEntryContactsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Contacts) - - -@pytest.mark.asyncio -async def test_modify_entry_contacts_async_from_dict(): - await test_modify_entry_contacts_async(request_type=dict) - - -def test_modify_entry_contacts_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ModifyEntryContactsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_contacts), - '__call__') as call: - call.return_value = datacatalog.Contacts() - client.modify_entry_contacts(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_modify_entry_contacts_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ModifyEntryContactsRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_entry_contacts), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Contacts()) - await client.modify_entry_contacts(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagTemplateRequest, - dict, -]) -def test_create_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - ) - response = client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -def test_create_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - client.create_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - -@pytest.mark.asyncio -async def test_create_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - )) - response = await client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -@pytest.mark.asyncio -async def test_create_tag_template_async_from_dict(): - await test_create_tag_template_async(request_type=dict) - - -def test_create_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_tag_template( - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_id - mock_val = 'tag_template_id_value' - assert arg == mock_val - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - - -def test_create_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_tag_template( - datacatalog.CreateTagTemplateRequest(), - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_tag_template( - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_id - mock_val = 'tag_template_id_value' - assert arg == mock_val - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag_template( - datacatalog.CreateTagTemplateRequest(), - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetTagTemplateRequest, - dict, -]) -def test_get_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - ) - response = client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -def test_get_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - client.get_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - -@pytest.mark.asyncio -async def test_get_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - )) - response = await client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -@pytest.mark.asyncio -async def test_get_tag_template_async_from_dict(): - await test_get_tag_template_async(request_type=dict) - - -def test_get_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_tag_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_tag_template( - datacatalog.GetTagTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_tag_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_tag_template( - datacatalog.GetTagTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagTemplateRequest, - dict, -]) -def test_update_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - ) - response = client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -def test_update_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - client.update_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - -@pytest.mark.asyncio -async def test_update_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - is_publicly_readable=True, - )) - response = await client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_publicly_readable is True - - -@pytest.mark.asyncio -async def test_update_tag_template_async_from_dict(): - await test_update_tag_template_async(request_type=dict) - - -def test_update_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateRequest() - - request.tag_template.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag_template.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateRequest() - - request.tag_template.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag_template.name=name_value', - ) in kw['metadata'] - - -def test_update_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag_template( - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_tag_template( - datacatalog.UpdateTagTemplateRequest(), - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag_template( - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.update_tag_template( - datacatalog.UpdateTagTemplateRequest(), - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagTemplateRequest, - dict, -]) -def test_delete_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - client.delete_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_template_async_from_dict(): - await test_delete_tag_template_async(request_type=dict) - - -def test_delete_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - call.return_value = None - client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag_template( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - - -def test_delete_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag_template( - datacatalog.DeleteTagTemplateRequest(), - name='name_value', - force=True, - ) - -@pytest.mark.asyncio -async def test_delete_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag_template( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag_template( - datacatalog.DeleteTagTemplateRequest(), - name='name_value', - force=True, - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagTemplateFieldRequest, - dict, -]) -def test_create_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_create_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - client.create_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_create_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_create_tag_template_field_async_from_dict(): - await test_create_tag_template_field_async(request_type=dict) - - -def test_create_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateFieldRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateFieldRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_tag_template_field( - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_field_id - mock_val = 'tag_template_field_id_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - - -def test_create_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_tag_template_field( - datacatalog.CreateTagTemplateFieldRequest(), - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_tag_template_field( - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_field_id - mock_val = 'tag_template_field_id_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag_template_field( - datacatalog.CreateTagTemplateFieldRequest(), - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagTemplateFieldRequest, - dict, -]) -def test_update_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_update_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - client.update_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_update_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_update_tag_template_field_async_from_dict(): - await test_update_tag_template_field_async(request_type=dict) - - -def test_update_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.UpdateTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag_template_field( - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_tag_template_field( - datacatalog.UpdateTagTemplateFieldRequest(), - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag_template_field( - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_tag_template_field( - datacatalog.UpdateTagTemplateFieldRequest(), - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.RenameTagTemplateFieldRequest, - dict, -]) -def test_rename_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_rename_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - client.rename_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_rename_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_async_from_dict(): - await test_rename_tag_template_field_async(request_type=dict) - - -def test_rename_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_tag_template_field( - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_tag_template_field_id - mock_val = 'new_tag_template_field_id_value' - assert arg == mock_val - - -def test_rename_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rename_tag_template_field( - datacatalog.RenameTagTemplateFieldRequest(), - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - -@pytest.mark.asyncio -async def test_rename_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_tag_template_field( - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_tag_template_field_id - mock_val = 'new_tag_template_field_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_tag_template_field( - datacatalog.RenameTagTemplateFieldRequest(), - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.RenameTagTemplateFieldEnumValueRequest, - dict, -]) -def test_rename_tag_template_field_enum_value(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_rename_tag_template_field_enum_value_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - client.rename_tag_template_field_enum_value() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_async_from_dict(): - await test_rename_tag_template_field_enum_value_async(request_type=dict) - - -def test_rename_tag_template_field_enum_value_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldEnumValueRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldEnumValueRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_tag_template_field_enum_value_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_tag_template_field_enum_value( - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_enum_value_display_name - mock_val = 'new_enum_value_display_name_value' - assert arg == mock_val - - -def test_rename_tag_template_field_enum_value_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rename_tag_template_field_enum_value( - datacatalog.RenameTagTemplateFieldEnumValueRequest(), - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_tag_template_field_enum_value( - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_enum_value_display_name - mock_val = 'new_enum_value_display_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_tag_template_field_enum_value( - datacatalog.RenameTagTemplateFieldEnumValueRequest(), - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagTemplateFieldRequest, - dict, -]) -def test_delete_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - client.delete_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_delete_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_async_from_dict(): - await test_delete_tag_template_field_async(request_type=dict) - - -def test_delete_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - call.return_value = None - client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag_template_field( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - - -def test_delete_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag_template_field( - datacatalog.DeleteTagTemplateFieldRequest(), - name='name_value', - force=True, - ) - -@pytest.mark.asyncio -async def test_delete_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag_template_field( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag_template_field( - datacatalog.DeleteTagTemplateFieldRequest(), - name='name_value', - force=True, - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagRequest, - dict, -]) -def test_create_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - column='column_value', - ) - response = client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -def test_create_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - client.create_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - -@pytest.mark.asyncio -async def test_create_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - )) - response = await client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -@pytest.mark.asyncio -async def test_create_tag_async_from_dict(): - await test_create_tag_async(request_type=dict) - - -def test_create_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - call.return_value = tags.Tag() - client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_tag( - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - - -def test_create_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_tag( - datacatalog.CreateTagRequest(), - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_tag( - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag( - datacatalog.CreateTagRequest(), - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagRequest, - dict, -]) -def test_update_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - column='column_value', - ) - response = client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -def test_update_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
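For orientation while reviewing the CreateTag tests above: they exercise the two calling styles every generated method supports and assert that mixing them fails. A minimal usage sketch, assuming Application Default Credentials are configured; the parent entry and tag template names are hypothetical placeholders, not values from this change:

    from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient
    from google.cloud.datacatalog_v1.types import datacatalog, tags

    client = DataCatalogClient()  # assumes Application Default Credentials
    parent = "projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry"  # placeholder
    template = "projects/my-project/locations/us-central1/tagTemplates/my_template"  # placeholder

    # Style 1: flattened keyword arguments (what the *_flattened tests call).
    tag = client.create_tag(parent=parent, tag=tags.Tag(template=template))

    # Style 2: an explicit request object.
    request = datacatalog.CreateTagRequest(parent=parent, tag=tags.Tag(template=template))
    tag = client.create_tag(request=request)

    # Combining a request object with flattened fields raises ValueError,
    # which is what the *_flattened_error tests assert:
    # client.create_tag(datacatalog.CreateTagRequest(), parent=parent)  # ValueError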
- with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - client.update_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - -@pytest.mark.asyncio -async def test_update_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - )) - response = await client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -@pytest.mark.asyncio -async def test_update_tag_async_from_dict(): - await test_update_tag_async(request_type=dict) - - -def test_update_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagRequest() - - request.tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - call.return_value = tags.Tag() - client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagRequest() - - request.tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag.name=name_value', - ) in kw['metadata'] - - -def test_update_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag( - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_tag( - datacatalog.UpdateTagRequest(), - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag( - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_tag( - datacatalog.UpdateTagRequest(), - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagRequest, - dict, -]) -def test_delete_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - client.delete_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - -@pytest.mark.asyncio -async def test_delete_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_async_from_dict(): - await test_delete_tag_async(request_type=dict) - - -def test_delete_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - call.return_value = None - client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.DeleteTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag( - datacatalog.DeleteTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag( - datacatalog.DeleteTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ListTagsRequest, - dict, -]) -def test_list_tags(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tags_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - client.list_tags() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - -@pytest.mark.asyncio -async def test_list_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListTagsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTagsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tags_async_from_dict(): - await test_list_tags_async(request_type=dict) - - -def test_list_tags_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - call.return_value = datacatalog.ListTagsResponse() - client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tags_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) - await client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tags_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tags_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tags( - datacatalog.ListTagsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tags_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tags_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_tags( - datacatalog.ListTagsRequest(), - parent='parent_value', - ) - - -def test_list_tags_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tags(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tags.Tag) - for i in results) -def test_list_tags_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tags(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tags_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tags(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tags.Tag) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tags_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
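The pager tests above stub out multi-page ListTagsResponse sequences; in normal use the returned pager hides that paging. A rough consumption sketch matching what these tests assert, with a placeholder parent and Application Default Credentials assumed:

    from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient

    client = DataCatalogClient()  # assumes Application Default Credentials
    parent = "projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry"  # placeholder

    # Iterating the pager yields Tag messages and fetches further pages on demand.
    for tag in client.list_tags(parent=parent):
        print(tag.name)

    # Page-level access mirrors the `.pages` / `raw_page.next_page_token` assertions above.
    for page in client.list_tags(parent=parent).pages:
        print(page.raw_page.next_page_token)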
- with mock.patch.object( - type(client.transport.list_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.ReconcileTagsRequest, - dict, -]) -def test_reconcile_tags(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reconcile_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.reconcile_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ReconcileTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_reconcile_tags_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reconcile_tags), - '__call__') as call: - client.reconcile_tags() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ReconcileTagsRequest() - -@pytest.mark.asyncio -async def test_reconcile_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ReconcileTagsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reconcile_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.reconcile_tags(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ReconcileTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_reconcile_tags_async_from_dict(): - await test_reconcile_tags_async(request_type=dict) - - -def test_reconcile_tags_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ReconcileTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reconcile_tags), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.reconcile_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reconcile_tags_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ReconcileTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reconcile_tags), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.reconcile_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datacatalog.StarEntryRequest, - dict, -]) -def test_star_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.StarEntryResponse( - ) - response = client.star_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.StarEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.StarEntryResponse) - - -def test_star_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
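The ReconcileTags tests above only check that the client returns an operation future; as a sketch of real usage, the caller blocks on that future for the final result. The parent below is a placeholder, other request fields are omitted for brevity, and Application Default Credentials are assumed:

    from google.cloud.datacatalog_v1.services.data_catalog import DataCatalogClient
    from google.cloud.datacatalog_v1.types import datacatalog

    client = DataCatalogClient()  # assumes Application Default Credentials
    operation = client.reconcile_tags(
        request=datacatalog.ReconcileTagsRequest(
            parent="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",  # placeholder
        )
    )
    response = operation.result()  # waits for the long-running reconcile to finish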
- client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - client.star_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.StarEntryRequest() - -@pytest.mark.asyncio -async def test_star_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.StarEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse( - )) - response = await client.star_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.StarEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.StarEntryResponse) - - -@pytest.mark.asyncio -async def test_star_entry_async_from_dict(): - await test_star_entry_async(request_type=dict) - - -def test_star_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.StarEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - call.return_value = datacatalog.StarEntryResponse() - client.star_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_star_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.StarEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse()) - await client.star_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_star_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.StarEntryResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.star_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_star_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.star_entry( - datacatalog.StarEntryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_star_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.star_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.StarEntryResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.StarEntryResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.star_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_star_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.star_entry( - datacatalog.StarEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UnstarEntryRequest, - dict, -]) -def test_unstar_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.UnstarEntryResponse( - ) - response = client.unstar_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UnstarEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.UnstarEntryResponse) - - -def test_unstar_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - client.unstar_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UnstarEntryRequest() - -@pytest.mark.asyncio -async def test_unstar_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UnstarEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse( - )) - response = await client.unstar_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UnstarEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.UnstarEntryResponse) - - -@pytest.mark.asyncio -async def test_unstar_entry_async_from_dict(): - await test_unstar_entry_async(request_type=dict) - - -def test_unstar_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UnstarEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - call.return_value = datacatalog.UnstarEntryResponse() - client.unstar_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_unstar_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UnstarEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse()) - await client.unstar_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_unstar_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.UnstarEntryResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.unstar_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_unstar_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.unstar_entry( - datacatalog.UnstarEntryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_unstar_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.unstar_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.UnstarEntryResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.UnstarEntryResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.unstar_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_unstar_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.unstar_entry( - datacatalog.UnstarEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_set_iam_policy_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_get_iam_policy_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ImportEntriesRequest, - dict, -]) -def test_import_entries(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ImportEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entries), - '__call__') as call: - client.import_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ImportEntriesRequest() - -@pytest.mark.asyncio -async def test_import_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ImportEntriesRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ImportEntriesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_entries_async_from_dict(): - await test_import_entries_async(request_type=dict) - - -def test_import_entries_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ImportEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entries), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_entries_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ImportEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_entries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataCatalogClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataCatalogGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = DataCatalogClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataCatalogGrpcTransport, - ) - -def test_data_catalog_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataCatalogTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_catalog_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataCatalogTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'search_catalog', - 'create_entry_group', - 'get_entry_group', - 'update_entry_group', - 'delete_entry_group', - 'list_entry_groups', - 'create_entry', - 'update_entry', - 'delete_entry', - 'get_entry', - 'lookup_entry', - 'list_entries', - 'modify_entry_overview', - 'modify_entry_contacts', - 'create_tag_template', - 'get_tag_template', - 'update_tag_template', - 'delete_tag_template', - 'create_tag_template_field', - 'update_tag_template_field', - 'rename_tag_template_field', - 'rename_tag_template_field_enum_value', - 'delete_tag_template_field', - 'create_tag', - 'update_tag', - 'delete_tag', - 'list_tags', - 'reconcile_tags', - 'star_entry', - 'unstar_entry', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'import_entries', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_catalog_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataCatalogTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_catalog_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataCatalogTransport() - adc.assert_called_once() - - -def test_data_catalog_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataCatalogClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, - ], -) -def test_data_catalog_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, - ], -) -def test_data_catalog_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataCatalogGrpcTransport, grpc_helpers), - (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_catalog_host_no_port(transport_name): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_catalog_host_with_port(transport_name): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_data_catalog_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_data_catalog_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_catalog_grpc_lro_client(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_data_catalog_grpc_lro_async_client(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_entry_path(): - project = "squid" - location = "clam" - entry_group = "whelk" - entry = "octopus" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - actual = DataCatalogClient.entry_path(project, location, entry_group, entry) - assert expected == actual - - -def test_parse_entry_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "entry_group": "cuttlefish", - "entry": "mussel", - } - path = DataCatalogClient.entry_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_entry_path(path) - assert expected == actual - -def test_entry_group_path(): - project = "winkle" - location = "nautilus" - entry_group = "scallop" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - actual = DataCatalogClient.entry_group_path(project, location, entry_group) - assert expected == actual - - -def test_parse_entry_group_path(): - expected = { - "project": "abalone", - "location": "squid", - "entry_group": "clam", - } - path = DataCatalogClient.entry_group_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_entry_group_path(path) - assert expected == actual - -def test_tag_path(): - project = "whelk" - location = "octopus" - entry_group = "oyster" - entry = "nudibranch" - tag = "cuttlefish" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) - actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) - assert expected == actual - - -def test_parse_tag_path(): - expected = { - "project": "mussel", - "location": "winkle", - "entry_group": "nautilus", - "entry": "scallop", - "tag": "abalone", - } - path = DataCatalogClient.tag_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_path(path) - assert expected == actual - -def test_tag_template_path(): - project = "squid" - location = "clam" - tag_template = "whelk" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) - actual = DataCatalogClient.tag_template_path(project, location, tag_template) - assert expected == actual - - -def test_parse_tag_template_path(): - expected = { - "project": "octopus", - "location": "oyster", - "tag_template": "nudibranch", - } - path = DataCatalogClient.tag_template_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_tag_template_path(path) - assert expected == actual - -def test_tag_template_field_path(): - project = "cuttlefish" - location = "mussel" - tag_template = "winkle" - field = "nautilus" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) - actual = DataCatalogClient.tag_template_field_path(project, location, tag_template, field) - assert expected == actual - - -def test_parse_tag_template_field_path(): - expected = { - "project": "scallop", - "location": "abalone", - "tag_template": "squid", - "field": "clam", - } - path = DataCatalogClient.tag_template_field_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_field_path(path) - assert expected == actual - -def test_tag_template_field_enum_value_path(): - project = "whelk" - location = "octopus" - tag_template = "oyster" - tag_template_field_id = "nudibranch" - enum_value_display_name = "cuttlefish" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) - actual = DataCatalogClient.tag_template_field_enum_value_path(project, location, tag_template, tag_template_field_id, enum_value_display_name) - assert expected == actual - - -def test_parse_tag_template_field_enum_value_path(): - expected = { - "project": "mussel", - "location": "winkle", - "tag_template": "nautilus", - "tag_template_field_id": "scallop", - "enum_value_display_name": "abalone", - } - path = DataCatalogClient.tag_template_field_enum_value_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataCatalogClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DataCatalogClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataCatalogClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DataCatalogClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataCatalogClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DataCatalogClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = DataCatalogClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DataCatalogClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataCatalogClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DataCatalogClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: - transport_class = DataCatalogClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_delete_operation(transport: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py deleted file mode 100644 index 1e3777f4c4fc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ /dev/null @@ -1,5041 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1.services.policy_tag_manager import PolicyTagManagerAsyncClient -from google.cloud.datacatalog_v1.services.policy_tag_manager import PolicyTagManagerClient -from google.cloud.datacatalog_v1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1.services.policy_tag_manager import transports -from google.cloud.datacatalog_v1.types import common -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None - assert PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerClient, "grpc"), - (PolicyTagManagerAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PolicyTagManagerGrpcTransport, "grpc"), - (transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerClient, "grpc"), - (PolicyTagManagerAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_policy_tag_manager_client_get_transport_class(): - transport = PolicyTagManagerClient.get_transport_class() - 
available_transports = [ - transports.PolicyTagManagerGrpcTransport, - ] - assert transport in available_transports - - transport = PolicyTagManagerClient.get_transport_class("grpc") - assert transport == transports.PolicyTagManagerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -def test_policy_tag_manager_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "true"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "false"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_policy_tag_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PolicyTagManagerClient, PolicyTagManagerAsyncClient -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -def test_policy_tag_manager_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_policy_tag_manager_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PolicyTagManagerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.CreateTaxonomyRequest, - dict, -]) -def test_create_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_create_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - client.create_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - -@pytest.mark.asyncio -async def test_create_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreateTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_create_taxonomy_async_from_dict(): - await test_create_taxonomy_async(request_type=dict) - - -def test_create_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreateTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreateTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_taxonomy( - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - - -def test_create_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_taxonomy( - policytagmanager.CreateTaxonomyRequest(), - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_taxonomy( - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_taxonomy( - policytagmanager.CreateTaxonomyRequest(), - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.DeleteTaxonomyRequest, - dict, -]) -def test_delete_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - client.delete_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - -@pytest.mark.asyncio -async def test_delete_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeleteTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_taxonomy_async_from_dict(): - await test_delete_taxonomy_async(request_type=dict) - - -def test_delete_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeleteTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - call.return_value = None - client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeleteTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_taxonomy( - policytagmanager.DeleteTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_taxonomy( - policytagmanager.DeleteTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.UpdateTaxonomyRequest, - dict, -]) -def test_update_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_update_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - client.update_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - -@pytest.mark.asyncio -async def test_update_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdateTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_update_taxonomy_async_from_dict(): - await test_update_taxonomy_async(request_type=dict) - - -def test_update_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdateTaxonomyRequest() - - request.taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'taxonomy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdateTaxonomyRequest() - - request.taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'taxonomy.name=name_value', - ) in kw['metadata'] - - -def test_update_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_taxonomy( - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - - -def test_update_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_taxonomy( - policytagmanager.UpdateTaxonomyRequest(), - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_taxonomy( - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_taxonomy( - policytagmanager.UpdateTaxonomyRequest(), - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.ListTaxonomiesRequest, - dict, -]) -def test_list_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTaxonomiesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - client.list_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_list_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListTaxonomiesRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTaxonomiesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_taxonomies_async_from_dict(): - await test_list_taxonomies_async(request_type=dict) - - -def test_list_taxonomies_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - call.return_value = policytagmanager.ListTaxonomiesResponse() - client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_taxonomies_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) - await client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_taxonomies_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_taxonomies_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_taxonomies( - policytagmanager.ListTaxonomiesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_taxonomies_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_taxonomies_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_taxonomies( - policytagmanager.ListTaxonomiesRequest(), - parent='parent_value', - ) - - -def test_list_taxonomies_pager(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_taxonomies(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, policytagmanager.Taxonomy) - for i in results) -def test_list_taxonomies_pages(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_taxonomies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_taxonomies_async_pager(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
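The pager test above seeds the mocked stub with four pages via side_effect and expects iteration to flatten 3 + 0 + 1 + 2 = 6 taxonomies. A simplified, self-contained pager showing the same behaviour; Page and SimplePager are stand-ins for the generated response type and pagers.ListTaxonomiesPager, not the real classes.

from unittest import mock


class Page(dict):
    @property
    def next_page_token(self):
        return self.get("next_page_token", "")


class SimplePager:
    """Iterates pages from a callable until the page token runs out."""

    def __init__(self, method):
        self._method = method

    @property
    def pages(self):
        while True:
            page = self._method(request={})
            yield page
            if not page.next_page_token:
                break

    def __iter__(self):
        for page in self.pages:
            yield from page.get("taxonomies", [])


def test_pager_flattens_all_pages():
    stub = mock.Mock(side_effect=[
        Page(taxonomies=["t1", "t2", "t3"], next_page_token="abc"),
        Page(taxonomies=[], next_page_token="def"),
        Page(taxonomies=["t4"], next_page_token="ghi"),
        Page(taxonomies=["t5", "t6"]),
    ])
    assert list(SimplePager(stub)) == ["t1", "t2", "t3", "t4", "t5", "t6"]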
- call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_taxonomies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, policytagmanager.Taxonomy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_taxonomies_async_pages(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_taxonomies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - policytagmanager.GetTaxonomyRequest, - dict, -]) -def test_get_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - - # Establish that the response is the type that we expect. 
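The async pager variant above drives the same page sequence through mock.AsyncMock and collects items with async for. A minimal sketch of that flow under the same assumptions; iterate_items is a hypothetical stand-in for the generated async pager.

import asyncio
from unittest import mock


async def iterate_items(method):
    """Pull pages from an awaitable callable until the token is empty."""
    while True:
        page = await method(request={})
        for item in page.get("taxonomies", []):
            yield item
        if not page.get("next_page_token"):
            break


async def collect(stub):
    return [item async for item in iterate_items(stub)]


def main():
    stub = mock.AsyncMock(side_effect=[
        {"taxonomies": ["t1", "t2", "t3"], "next_page_token": "abc"},
        {"taxonomies": ["t4", "t5", "t6"], "next_page_token": ""},
    ])
    assert asyncio.run(collect(stub)) == ["t1", "t2", "t3", "t4", "t5", "t6"]


if __name__ == "__main__":
    main()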
- assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_get_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - client.get_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - -@pytest.mark.asyncio -async def test_get_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_get_taxonomy_async_from_dict(): - await test_get_taxonomy_async(request_type=dict) - - -def test_get_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_taxonomy( - policytagmanager.GetTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_taxonomy( - policytagmanager.GetTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.CreatePolicyTagRequest, - dict, -]) -def test_create_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_create_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - client.create_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - -@pytest.mark.asyncio -async def test_create_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreatePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_create_policy_tag_async_from_dict(): - await test_create_policy_tag_async(request_type=dict) - - -def test_create_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreatePolicyTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreatePolicyTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_policy_tag( - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - - -def test_create_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_policy_tag( - policytagmanager.CreatePolicyTagRequest(), - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_policy_tag( - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_policy_tag( - policytagmanager.CreatePolicyTagRequest(), - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.DeletePolicyTagRequest, - dict, -]) -def test_delete_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - client.delete_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - -@pytest.mark.asyncio -async def test_delete_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeletePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_policy_tag_async_from_dict(): - await test_delete_policy_tag_async(request_type=dict) - - -def test_delete_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeletePolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - call.return_value = None - client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeletePolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
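The *_empty_call tests above are coverage failsafes: calling a method with neither a request object nor flattened fields must still build and send a default request. A toy version of that check; ToyRequest and ToyClient are hypothetical stand-ins for the generated types.

from dataclasses import dataclass
from unittest import mock


@dataclass
class ToyRequest:
    name: str = ""


class ToyClient:
    def __init__(self, stub):
        self._stub = stub

    def delete_policy_tag(self, request=None, *, name=None):
        # With nothing supplied, the client still constructs a default request.
        if request is None:
            request = ToyRequest(name=name or "")
        return self._stub(request)


def test_empty_call_sends_default_request():
    stub = mock.Mock(return_value=None)
    ToyClient(stub).delete_policy_tag()
    stub.assert_called_once_with(ToyRequest())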
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_policy_tag( - policytagmanager.DeletePolicyTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_policy_tag( - policytagmanager.DeletePolicyTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.UpdatePolicyTagRequest, - dict, -]) -def test_update_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_update_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - client.update_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - -@pytest.mark.asyncio -async def test_update_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdatePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_update_policy_tag_async_from_dict(): - await test_update_policy_tag_async(request_type=dict) - - -def test_update_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = policytagmanager.UpdatePolicyTagRequest() - - request.policy_tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy_tag.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdatePolicyTagRequest() - - request.policy_tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy_tag.name=name_value', - ) in kw['metadata'] - - -def test_update_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_policy_tag( - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - - -def test_update_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_policy_tag( - policytagmanager.UpdatePolicyTagRequest(), - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
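The field-header tests, including the nested policy_tag.name case above, assert that URI-bound request fields are mirrored into the x-goog-request-params metadata entry before the stub is invoked. A self-contained sketch of that routing-header behaviour; the wrapper function below is illustrative, not the generated client code.

from unittest import mock


def call_with_routing_header(stub, request, routing_fields):
    """Mimics the client copying request fields into x-goog-request-params."""
    params = "&".join("%s=%s" % (field, value) for field, value in routing_fields)
    return stub(request, metadata=(("x-goog-request-params", params),))


def test_nested_field_routing_header():
    stub = mock.Mock(return_value=None)
    request = {"policy_tag": {"name": "name_value"}}
    call_with_routing_header(
        stub, request, [("policy_tag.name", request["policy_tag"]["name"])]
    )
    _, kwargs = stub.call_args
    assert ("x-goog-request-params", "policy_tag.name=name_value") in kwargs["metadata"]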
- call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_policy_tag( - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_policy_tag( - policytagmanager.UpdatePolicyTagRequest(), - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.ListPolicyTagsRequest, - dict, -]) -def test_list_policy_tags(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPolicyTagsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_policy_tags_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - client.list_policy_tags() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - -@pytest.mark.asyncio -async def test_list_policy_tags_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListPolicyTagsRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPolicyTagsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_policy_tags_async_from_dict(): - await test_list_policy_tags_async(request_type=dict) - - -def test_list_policy_tags_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListPolicyTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - call.return_value = policytagmanager.ListPolicyTagsResponse() - client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_policy_tags_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListPolicyTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) - await client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_policy_tags_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_policy_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_policy_tags_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_policy_tags( - policytagmanager.ListPolicyTagsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_policy_tags_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_policy_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_policy_tags_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_policy_tags( - policytagmanager.ListPolicyTagsRequest(), - parent='parent_value', - ) - - -def test_list_policy_tags_pager(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_policy_tags(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, policytagmanager.PolicyTag) - for i in results) -def test_list_policy_tags_pages(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_policy_tags(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_policy_tags_async_pager(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_policy_tags(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, policytagmanager.PolicyTag) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_policy_tags_async_pages(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_policy_tags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - policytagmanager.GetPolicyTagRequest, - dict, -]) -def test_get_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_get_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - client.get_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - -@pytest.mark.asyncio -async def test_get_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetPolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_get_policy_tag_async_from_dict(): - await test_get_policy_tag_async(request_type=dict) - - -def test_get_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetPolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetPolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_policy_tag( - policytagmanager.GetPolicyTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_policy_tag( - policytagmanager.GetPolicyTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PolicyTagManagerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = PolicyTagManagerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PolicyTagManagerGrpcTransport, - ) - -def test_policy_tag_manager_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PolicyTagManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_policy_tag_manager_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PolicyTagManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_taxonomy', - 'delete_taxonomy', - 'update_taxonomy', - 'list_taxonomies', - 'get_taxonomy', - 'create_policy_tag', - 'delete_policy_tag', - 'update_policy_tag', - 'list_policy_tags', - 'get_policy_tag', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_policy_tag_manager_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_policy_tag_manager_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerTransport() - adc.assert_called_once() - - -def test_policy_tag_manager_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PolicyTagManagerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PolicyTagManagerGrpcTransport, grpc_helpers), - (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_host_no_port(transport_name): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_host_with_port(transport_name): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_policy_tag_manager_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_policy_tag_manager_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_policy_tag_path(): - project = "squid" - location = "clam" - taxonomy = "whelk" - policy_tag = "octopus" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) - actual = PolicyTagManagerClient.policy_tag_path(project, location, taxonomy, policy_tag) - assert expected == actual - - -def test_parse_policy_tag_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "taxonomy": "cuttlefish", - "policy_tag": "mussel", - } - path = PolicyTagManagerClient.policy_tag_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_policy_tag_path(path) - assert expected == actual - -def test_taxonomy_path(): - project = "winkle" - location = "nautilus" - taxonomy = "scallop" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) - assert expected == actual - - -def test_parse_taxonomy_path(): - expected = { - "project": "abalone", - "location": "squid", - "taxonomy": "clam", - } - path = PolicyTagManagerClient.taxonomy_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PolicyTagManagerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = PolicyTagManagerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = PolicyTagManagerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = PolicyTagManagerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PolicyTagManagerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = PolicyTagManagerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = PolicyTagManagerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = PolicyTagManagerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PolicyTagManagerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = PolicyTagManagerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: - transport_class = PolicyTagManagerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_delete_operation(transport: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py deleted file mode 100644 index c4e33ec77510..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ /dev/null @@ -1,2144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient -from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient -from google.cloud.datacatalog_v1.services.policy_tag_manager_serialization import transports -from google.cloud.datacatalog_v1.types import policytagmanager -from google.cloud.datacatalog_v1.types import policytagmanagerserialization -from google.cloud.datacatalog_v1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerSerializationClient, "grpc"), - (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerSerializationClient, "grpc"), - (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - 
assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_policy_tag_manager_serialization_client_get_transport_class(): - transport = PolicyTagManagerSerializationClient.get_transport_class() - available_transports = [ - transports.PolicyTagManagerSerializationGrpcTransport, - ] - assert transport in available_transports - - transport = PolicyTagManagerSerializationClient.get_transport_class("grpc") - assert transport == transports.PolicyTagManagerSerializationGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -def test_policy_tag_manager_serialization_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "true"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "false"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_policy_tag_manager_serialization_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -def test_policy_tag_manager_serialization_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_serialization_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_policy_tag_manager_serialization_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PolicyTagManagerSerializationClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_serialization_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanagerserialization.ReplaceTaxonomyRequest, - dict, -]) -def test_replace_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.replace_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.replace_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_replace_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.replace_taxonomy), - '__call__') as call: - client.replace_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() - -@pytest.mark.asyncio -async def test_replace_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ReplaceTaxonomyRequest): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.replace_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.replace_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ReplaceTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_replace_taxonomy_async_from_dict(): - await test_replace_taxonomy_async(request_type=dict) - - -def test_replace_taxonomy_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ReplaceTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.replace_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.replace_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_replace_taxonomy_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ReplaceTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.replace_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.replace_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - policytagmanagerserialization.ImportTaxonomiesRequest, - dict, -]) -def test_import_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse( - ) - response = client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) - - -def test_import_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - client.import_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_import_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ImportTaxonomiesRequest): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse( - )) - response = await client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) - - -@pytest.mark.asyncio -async def test_import_taxonomies_async_from_dict(): - await test_import_taxonomies_async(request_type=dict) - - -def test_import_taxonomies_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ImportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() - client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_taxonomies_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ImportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse()) - await client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - policytagmanagerserialization.ExportTaxonomiesRequest, - dict, -]) -def test_export_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse( - ) - response = client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) - - -def test_export_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - client.export_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_export_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ExportTaxonomiesRequest): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse( - )) - response = await client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) - - -@pytest.mark.asyncio -async def test_export_taxonomies_async_from_dict(): - await test_export_taxonomies_async(request_type=dict) - - -def test_export_taxonomies_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ExportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() - client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_taxonomies_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ExportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse()) - await client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PolicyTagManagerSerializationClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = PolicyTagManagerSerializationClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PolicyTagManagerSerializationGrpcTransport, - ) - -def test_policy_tag_manager_serialization_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PolicyTagManagerSerializationTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_policy_tag_manager_serialization_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PolicyTagManagerSerializationTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'replace_taxonomy', - 'import_taxonomies', - 'export_taxonomies', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerSerializationTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_policy_tag_manager_serialization_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerSerializationTransport() - adc.assert_called_once() - - -def test_policy_tag_manager_serialization_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PolicyTagManagerSerializationClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_serialization_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), - (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_policy_tag_manager_serialization_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_serialization_host_no_port(transport_name): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_serialization_host_with_port(transport_name): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_policy_tag_manager_serialization_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_taxonomy_path(): - project = "squid" - location = "clam" - taxonomy = "whelk" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - actual = PolicyTagManagerSerializationClient.taxonomy_path(project, location, taxonomy) - assert expected == actual - - -def test_parse_taxonomy_path(): - expected = { - "project": "octopus", - "location": "oyster", - "taxonomy": "nudibranch", - } - path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PolicyTagManagerSerializationClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = PolicyTagManagerSerializationClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = PolicyTagManagerSerializationClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PolicyTagManagerSerializationClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = PolicyTagManagerSerializationClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = PolicyTagManagerSerializationClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = PolicyTagManagerSerializationClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PolicyTagManagerSerializationClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = PolicyTagManagerSerializationClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: - transport_class = PolicyTagManagerSerializationClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_delete_operation(transport: str = "grpc"): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc deleted file mode 100644 index 8d9d83e17533..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/datacatalog/__init__.py - google/cloud/datacatalog/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 b/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in deleted file mode 100644 index 0e9fef34a3a9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/datacatalog *.py -recursive-include google/cloud/datacatalog_v1beta1 *.py diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst deleted file mode 100644 index 8f53b24416fc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Datacatalog API -================================================= - -Quick Start ------------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Datacatalog API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py deleted file mode 100644 index aec9c23130e4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -# -# google-cloud-datacatalog documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-datacatalog" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. 
-# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-datacatalog-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-datacatalog.tex", - u"google-cloud-datacatalog Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-datacatalog", - u"Google Cloud Datacatalog Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-datacatalog", - u"google-cloud-datacatalog Documentation", - author, - "google-cloud-datacatalog", - "GAPIC library for Google Cloud Datacatalog API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst deleted file mode 100644 index 82ca26f399dc..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/data_catalog.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataCatalog ------------------------------ - -.. automodule:: google.cloud.datacatalog_v1beta1.services.data_catalog - :members: - :inherited-members: - -.. automodule:: google.cloud.datacatalog_v1beta1.services.data_catalog.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst deleted file mode 100644 index 8971945c327f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager.rst +++ /dev/null @@ -1,10 +0,0 @@ -PolicyTagManager ----------------------------------- - -.. automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager - :members: - :inherited-members: - -.. automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst deleted file mode 100644 index aed4c56cde06..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/policy_tag_manager_serialization.rst +++ /dev/null @@ -1,6 +0,0 @@ -PolicyTagManagerSerialization ------------------------------------------------ - -.. 
automodule:: google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst deleted file mode 100644 index 4f762e1c60d4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/services.rst +++ /dev/null @@ -1,8 +0,0 @@ -Services for Google Cloud Datacatalog v1beta1 API -================================================= -.. toctree:: - :maxdepth: 2 - - data_catalog - policy_tag_manager - policy_tag_manager_serialization diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst deleted file mode 100644 index a1baedafba34..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/datacatalog_v1beta1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Datacatalog v1beta1 API -============================================== - -.. automodule:: google.cloud.datacatalog_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst deleted file mode 100644 index ae7dac5f96ff..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - datacatalog_v1beta1/services - datacatalog_v1beta1/types diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py deleted file mode 100644 index 822441d552a0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/__init__.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.datacatalog import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.datacatalog_v1beta1.services.data_catalog.client import DataCatalogClient -from google.cloud.datacatalog_v1beta1.services.data_catalog.async_client import DataCatalogAsyncClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.client import PolicyTagManagerClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.async_client import PolicyTagManagerAsyncClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.client import PolicyTagManagerSerializationClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.async_client import PolicyTagManagerSerializationAsyncClient - -from google.cloud.datacatalog_v1beta1.types.common import IntegratedSystem -from google.cloud.datacatalog_v1beta1.types.common import ManagingSystem -from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryGroupRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateEntryRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagTemplateFieldRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import CreateTagTemplateRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteEntryGroupRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteEntryRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagTemplateFieldRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import DeleteTagTemplateRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import Entry -from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryGroup -from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryGroupRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import GetEntryRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import GetTagTemplateRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntriesRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntriesResponse -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntryGroupsRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListEntryGroupsResponse -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListTagsRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import ListTagsResponse -from google.cloud.datacatalog_v1beta1.types.datacatalog import LookupEntryRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import RenameTagTemplateFieldEnumValueRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import RenameTagTemplateFieldRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import SearchCatalogRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import SearchCatalogResponse -from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateEntryGroupRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateEntryRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagTemplateFieldRequest -from 
google.cloud.datacatalog_v1beta1.types.datacatalog import UpdateTagTemplateRequest -from google.cloud.datacatalog_v1beta1.types.datacatalog import EntryType -from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFilesetSpec -from google.cloud.datacatalog_v1beta1.types.gcs_fileset_spec import GcsFileSpec -from google.cloud.datacatalog_v1beta1.types.policytagmanager import CreatePolicyTagRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import CreateTaxonomyRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import DeletePolicyTagRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import DeleteTaxonomyRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import GetPolicyTagRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import GetTaxonomyRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListPolicyTagsRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListPolicyTagsResponse -from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListTaxonomiesRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import ListTaxonomiesResponse -from google.cloud.datacatalog_v1beta1.types.policytagmanager import PolicyTag -from google.cloud.datacatalog_v1beta1.types.policytagmanager import Taxonomy -from google.cloud.datacatalog_v1beta1.types.policytagmanager import UpdatePolicyTagRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanager import UpdateTaxonomyRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ExportTaxonomiesRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ExportTaxonomiesResponse -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ImportTaxonomiesRequest -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import ImportTaxonomiesResponse -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import InlineSource -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import SerializedPolicyTag -from google.cloud.datacatalog_v1beta1.types.policytagmanagerserialization import SerializedTaxonomy -from google.cloud.datacatalog_v1beta1.types.schema import ColumnSchema -from google.cloud.datacatalog_v1beta1.types.schema import Schema -from google.cloud.datacatalog_v1beta1.types.search import SearchCatalogResult -from google.cloud.datacatalog_v1beta1.types.search import SearchResultType -from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryDateShardedSpec -from google.cloud.datacatalog_v1beta1.types.table_spec import BigQueryTableSpec -from google.cloud.datacatalog_v1beta1.types.table_spec import TableSpec -from google.cloud.datacatalog_v1beta1.types.table_spec import ViewSpec -from google.cloud.datacatalog_v1beta1.types.table_spec import TableSourceType -from google.cloud.datacatalog_v1beta1.types.tags import FieldType -from google.cloud.datacatalog_v1beta1.types.tags import Tag -from google.cloud.datacatalog_v1beta1.types.tags import TagField -from google.cloud.datacatalog_v1beta1.types.tags import TagTemplate -from google.cloud.datacatalog_v1beta1.types.tags import TagTemplateField -from google.cloud.datacatalog_v1beta1.types.timestamps import SystemTimestamps -from google.cloud.datacatalog_v1beta1.types.usage import UsageSignal -from google.cloud.datacatalog_v1beta1.types.usage import UsageStats - -__all__ = 
('DataCatalogClient', - 'DataCatalogAsyncClient', - 'PolicyTagManagerClient', - 'PolicyTagManagerAsyncClient', - 'PolicyTagManagerSerializationClient', - 'PolicyTagManagerSerializationAsyncClient', - 'IntegratedSystem', - 'ManagingSystem', - 'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateTagRequest', - 'CreateTagTemplateFieldRequest', - 'CreateTagTemplateRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteTagRequest', - 'DeleteTagTemplateFieldRequest', - 'DeleteTagTemplateRequest', - 'Entry', - 'EntryGroup', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetTagTemplateRequest', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListTagsRequest', - 'ListTagsResponse', - 'LookupEntryRequest', - 'RenameTagTemplateFieldEnumValueRequest', - 'RenameTagTemplateFieldRequest', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateTagRequest', - 'UpdateTagTemplateFieldRequest', - 'UpdateTagTemplateRequest', - 'EntryType', - 'GcsFilesetSpec', - 'GcsFileSpec', - 'CreatePolicyTagRequest', - 'CreateTaxonomyRequest', - 'DeletePolicyTagRequest', - 'DeleteTaxonomyRequest', - 'GetPolicyTagRequest', - 'GetTaxonomyRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'PolicyTag', - 'Taxonomy', - 'UpdatePolicyTagRequest', - 'UpdateTaxonomyRequest', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - 'ImportTaxonomiesRequest', - 'ImportTaxonomiesResponse', - 'InlineSource', - 'SerializedPolicyTag', - 'SerializedTaxonomy', - 'ColumnSchema', - 'Schema', - 'SearchCatalogResult', - 'SearchResultType', - 'BigQueryDateShardedSpec', - 'BigQueryTableSpec', - 'TableSpec', - 'ViewSpec', - 'TableSourceType', - 'FieldType', - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'SystemTimestamps', - 'UsageSignal', - 'UsageStats', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py deleted file mode 100644 index 360a0d13ebdd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed deleted file mode 100644 index bb4088a3c198..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datacatalog package uses inline types. 
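The files removed just above are generated boilerplate: gapic_version.py carries the release-please version placeholder, py.typed is the PEP 561 marker, and the unversioned google.cloud.datacatalog __init__.py only re-exports the versioned clients and types. As a small illustrative sketch (not part of the patch), the resource-name helpers exercised by the taxonomy-path tests earlier in this patch are plain classmethods that format and parse strings, so they need no credentials or network access; the project, location, and taxonomy values below are hypothetical.

from google.cloud import datacatalog_v1

Client = datacatalog_v1.PolicyTagManagerSerializationClient

# taxonomy_path only formats a string; no client instance or API call is needed.
path = Client.taxonomy_path("my-project", "us-central1", "my-taxonomy")
assert path == "projects/my-project/locations/us-central1/taxonomies/my-taxonomy"

# parse_taxonomy_path reverses the construction, which is exactly the
# round-trip the generated tests assert.
assert Client.parse_taxonomy_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "taxonomy": "my-taxonomy",
}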
diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py deleted file mode 100644 index e564e56f1fb5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/__init__.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_catalog import DataCatalogClient -from .services.data_catalog import DataCatalogAsyncClient -from .services.policy_tag_manager import PolicyTagManagerClient -from .services.policy_tag_manager import PolicyTagManagerAsyncClient -from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient -from .services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient - -from .types.common import IntegratedSystem -from .types.common import ManagingSystem -from .types.datacatalog import CreateEntryGroupRequest -from .types.datacatalog import CreateEntryRequest -from .types.datacatalog import CreateTagRequest -from .types.datacatalog import CreateTagTemplateFieldRequest -from .types.datacatalog import CreateTagTemplateRequest -from .types.datacatalog import DeleteEntryGroupRequest -from .types.datacatalog import DeleteEntryRequest -from .types.datacatalog import DeleteTagRequest -from .types.datacatalog import DeleteTagTemplateFieldRequest -from .types.datacatalog import DeleteTagTemplateRequest -from .types.datacatalog import Entry -from .types.datacatalog import EntryGroup -from .types.datacatalog import GetEntryGroupRequest -from .types.datacatalog import GetEntryRequest -from .types.datacatalog import GetTagTemplateRequest -from .types.datacatalog import ListEntriesRequest -from .types.datacatalog import ListEntriesResponse -from .types.datacatalog import ListEntryGroupsRequest -from .types.datacatalog import ListEntryGroupsResponse -from .types.datacatalog import ListTagsRequest -from .types.datacatalog import ListTagsResponse -from .types.datacatalog import LookupEntryRequest -from .types.datacatalog import RenameTagTemplateFieldEnumValueRequest -from .types.datacatalog import RenameTagTemplateFieldRequest -from .types.datacatalog import SearchCatalogRequest -from .types.datacatalog import SearchCatalogResponse -from .types.datacatalog import UpdateEntryGroupRequest -from .types.datacatalog import UpdateEntryRequest -from .types.datacatalog import UpdateTagRequest -from .types.datacatalog import UpdateTagTemplateFieldRequest -from .types.datacatalog import UpdateTagTemplateRequest -from .types.datacatalog import EntryType -from .types.gcs_fileset_spec import GcsFilesetSpec -from .types.gcs_fileset_spec import GcsFileSpec -from .types.policytagmanager import CreatePolicyTagRequest -from .types.policytagmanager 
import CreateTaxonomyRequest -from .types.policytagmanager import DeletePolicyTagRequest -from .types.policytagmanager import DeleteTaxonomyRequest -from .types.policytagmanager import GetPolicyTagRequest -from .types.policytagmanager import GetTaxonomyRequest -from .types.policytagmanager import ListPolicyTagsRequest -from .types.policytagmanager import ListPolicyTagsResponse -from .types.policytagmanager import ListTaxonomiesRequest -from .types.policytagmanager import ListTaxonomiesResponse -from .types.policytagmanager import PolicyTag -from .types.policytagmanager import Taxonomy -from .types.policytagmanager import UpdatePolicyTagRequest -from .types.policytagmanager import UpdateTaxonomyRequest -from .types.policytagmanagerserialization import ExportTaxonomiesRequest -from .types.policytagmanagerserialization import ExportTaxonomiesResponse -from .types.policytagmanagerserialization import ImportTaxonomiesRequest -from .types.policytagmanagerserialization import ImportTaxonomiesResponse -from .types.policytagmanagerserialization import InlineSource -from .types.policytagmanagerserialization import SerializedPolicyTag -from .types.policytagmanagerserialization import SerializedTaxonomy -from .types.schema import ColumnSchema -from .types.schema import Schema -from .types.search import SearchCatalogResult -from .types.search import SearchResultType -from .types.table_spec import BigQueryDateShardedSpec -from .types.table_spec import BigQueryTableSpec -from .types.table_spec import TableSpec -from .types.table_spec import ViewSpec -from .types.table_spec import TableSourceType -from .types.tags import FieldType -from .types.tags import Tag -from .types.tags import TagField -from .types.tags import TagTemplate -from .types.tags import TagTemplateField -from .types.timestamps import SystemTimestamps -from .types.usage import UsageSignal -from .types.usage import UsageStats - -__all__ = ( - 'DataCatalogAsyncClient', - 'PolicyTagManagerAsyncClient', - 'PolicyTagManagerSerializationAsyncClient', -'BigQueryDateShardedSpec', -'BigQueryTableSpec', -'ColumnSchema', -'CreateEntryGroupRequest', -'CreateEntryRequest', -'CreatePolicyTagRequest', -'CreateTagRequest', -'CreateTagTemplateFieldRequest', -'CreateTagTemplateRequest', -'CreateTaxonomyRequest', -'DataCatalogClient', -'DeleteEntryGroupRequest', -'DeleteEntryRequest', -'DeletePolicyTagRequest', -'DeleteTagRequest', -'DeleteTagTemplateFieldRequest', -'DeleteTagTemplateRequest', -'DeleteTaxonomyRequest', -'Entry', -'EntryGroup', -'EntryType', -'ExportTaxonomiesRequest', -'ExportTaxonomiesResponse', -'FieldType', -'GcsFileSpec', -'GcsFilesetSpec', -'GetEntryGroupRequest', -'GetEntryRequest', -'GetPolicyTagRequest', -'GetTagTemplateRequest', -'GetTaxonomyRequest', -'ImportTaxonomiesRequest', -'ImportTaxonomiesResponse', -'InlineSource', -'IntegratedSystem', -'ListEntriesRequest', -'ListEntriesResponse', -'ListEntryGroupsRequest', -'ListEntryGroupsResponse', -'ListPolicyTagsRequest', -'ListPolicyTagsResponse', -'ListTagsRequest', -'ListTagsResponse', -'ListTaxonomiesRequest', -'ListTaxonomiesResponse', -'LookupEntryRequest', -'ManagingSystem', -'PolicyTag', -'PolicyTagManagerClient', -'PolicyTagManagerSerializationClient', -'RenameTagTemplateFieldEnumValueRequest', -'RenameTagTemplateFieldRequest', -'Schema', -'SearchCatalogRequest', -'SearchCatalogResponse', -'SearchCatalogResult', -'SearchResultType', -'SerializedPolicyTag', -'SerializedTaxonomy', -'SystemTimestamps', -'TableSourceType', -'TableSpec', -'Tag', -'TagField', -'TagTemplate', 
-'TagTemplateField', -'Taxonomy', -'UpdateEntryGroupRequest', -'UpdateEntryRequest', -'UpdatePolicyTagRequest', -'UpdateTagRequest', -'UpdateTagTemplateFieldRequest', -'UpdateTagTemplateRequest', -'UpdateTaxonomyRequest', -'UsageSignal', -'UsageStats', -'ViewSpec', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json deleted file mode 100644 index b40fba91c7c5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,481 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.datacatalog_v1beta1", - "protoPackage": "google.cloud.datacatalog.v1beta1", - "schema": "1.0", - "services": { - "DataCatalog": { - "clients": { - "grpc": { - "libraryClient": "DataCatalogClient", - "rpcs": { - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateTag": { - "methods": [ - "create_tag" - ] - }, - "CreateTagTemplate": { - "methods": [ - "create_tag_template" - ] - }, - "CreateTagTemplateField": { - "methods": [ - "create_tag_template_field" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteTag": { - "methods": [ - "delete_tag" - ] - }, - "DeleteTagTemplate": { - "methods": [ - "delete_tag_template" - ] - }, - "DeleteTagTemplateField": { - "methods": [ - "delete_tag_template_field" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetTagTemplate": { - "methods": [ - "get_tag_template" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListTags": { - "methods": [ - "list_tags" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "RenameTagTemplateField": { - "methods": [ - "rename_tag_template_field" - ] - }, - "RenameTagTemplateFieldEnumValue": { - "methods": [ - "rename_tag_template_field_enum_value" - ] - }, - "SearchCatalog": { - "methods": [ - "search_catalog" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateTag": { - "methods": [ - "update_tag" - ] - }, - "UpdateTagTemplate": { - "methods": [ - "update_tag_template" - ] - }, - "UpdateTagTemplateField": { - "methods": [ - "update_tag_template_field" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataCatalogAsyncClient", - "rpcs": { - "CreateEntry": { - "methods": [ - "create_entry" - ] - }, - "CreateEntryGroup": { - "methods": [ - "create_entry_group" - ] - }, - "CreateTag": { - "methods": [ - "create_tag" - ] - }, - "CreateTagTemplate": { - "methods": [ - "create_tag_template" - ] - }, - "CreateTagTemplateField": { - "methods": [ - "create_tag_template_field" - ] - }, - "DeleteEntry": { - "methods": [ - "delete_entry" - ] - }, - "DeleteEntryGroup": { - "methods": [ - "delete_entry_group" - ] - }, - "DeleteTag": { - "methods": 
[ - "delete_tag" - ] - }, - "DeleteTagTemplate": { - "methods": [ - "delete_tag_template" - ] - }, - "DeleteTagTemplateField": { - "methods": [ - "delete_tag_template_field" - ] - }, - "GetEntry": { - "methods": [ - "get_entry" - ] - }, - "GetEntryGroup": { - "methods": [ - "get_entry_group" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetTagTemplate": { - "methods": [ - "get_tag_template" - ] - }, - "ListEntries": { - "methods": [ - "list_entries" - ] - }, - "ListEntryGroups": { - "methods": [ - "list_entry_groups" - ] - }, - "ListTags": { - "methods": [ - "list_tags" - ] - }, - "LookupEntry": { - "methods": [ - "lookup_entry" - ] - }, - "RenameTagTemplateField": { - "methods": [ - "rename_tag_template_field" - ] - }, - "RenameTagTemplateFieldEnumValue": { - "methods": [ - "rename_tag_template_field_enum_value" - ] - }, - "SearchCatalog": { - "methods": [ - "search_catalog" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateEntry": { - "methods": [ - "update_entry" - ] - }, - "UpdateEntryGroup": { - "methods": [ - "update_entry_group" - ] - }, - "UpdateTag": { - "methods": [ - "update_tag" - ] - }, - "UpdateTagTemplate": { - "methods": [ - "update_tag_template" - ] - }, - "UpdateTagTemplateField": { - "methods": [ - "update_tag_template_field" - ] - } - } - } - } - }, - "PolicyTagManager": { - "clients": { - "grpc": { - "libraryClient": "PolicyTagManagerClient", - "rpcs": { - "CreatePolicyTag": { - "methods": [ - "create_policy_tag" - ] - }, - "CreateTaxonomy": { - "methods": [ - "create_taxonomy" - ] - }, - "DeletePolicyTag": { - "methods": [ - "delete_policy_tag" - ] - }, - "DeleteTaxonomy": { - "methods": [ - "delete_taxonomy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetPolicyTag": { - "methods": [ - "get_policy_tag" - ] - }, - "GetTaxonomy": { - "methods": [ - "get_taxonomy" - ] - }, - "ListPolicyTags": { - "methods": [ - "list_policy_tags" - ] - }, - "ListTaxonomies": { - "methods": [ - "list_taxonomies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdatePolicyTag": { - "methods": [ - "update_policy_tag" - ] - }, - "UpdateTaxonomy": { - "methods": [ - "update_taxonomy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PolicyTagManagerAsyncClient", - "rpcs": { - "CreatePolicyTag": { - "methods": [ - "create_policy_tag" - ] - }, - "CreateTaxonomy": { - "methods": [ - "create_taxonomy" - ] - }, - "DeletePolicyTag": { - "methods": [ - "delete_policy_tag" - ] - }, - "DeleteTaxonomy": { - "methods": [ - "delete_taxonomy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetPolicyTag": { - "methods": [ - "get_policy_tag" - ] - }, - "GetTaxonomy": { - "methods": [ - "get_taxonomy" - ] - }, - "ListPolicyTags": { - "methods": [ - "list_policy_tags" - ] - }, - "ListTaxonomies": { - "methods": [ - "list_taxonomies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdatePolicyTag": { - "methods": [ - "update_policy_tag" - ] - }, - "UpdateTaxonomy": { - "methods": [ - "update_taxonomy" - ] - } - } - } - } - }, - "PolicyTagManagerSerialization": { - "clients": { - "grpc": { - "libraryClient": "PolicyTagManagerSerializationClient", - "rpcs": { - "ExportTaxonomies": { - "methods": [ - 
"export_taxonomies" - ] - }, - "ImportTaxonomies": { - "methods": [ - "import_taxonomies" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PolicyTagManagerSerializationAsyncClient", - "rpcs": { - "ExportTaxonomies": { - "methods": [ - "export_taxonomies" - ] - }, - "ImportTaxonomies": { - "methods": [ - "import_taxonomies" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py deleted file mode 100644 index 360a0d13ebdd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed deleted file mode 100644 index bb4088a3c198..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-datacatalog package uses inline types. diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py deleted file mode 100644 index 89a37dc92c5a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py deleted file mode 100644 index e703e914bb2c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataCatalogClient -from .async_client import DataCatalogAsyncClient - -__all__ = ( - 'DataCatalogClient', - 'DataCatalogAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py deleted file mode 100644 index 49619a4b91e3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py +++ /dev/null @@ -1,3653 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
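The ``async_client.py`` module whose deletion begins here mirrors ``DataCatalogClient`` with awaitable methods and async pagers. A minimal sketch of driving it from ``asyncio``, assuming the released package and default credentials; the parent location is a placeholder.

.. code-block:: python

    import asyncio

    from google.cloud import datacatalog_v1beta1


    async def list_groups() -> None:
        # Awaitable counterpart of DataCatalogClient.
        client = datacatalog_v1beta1.DataCatalogAsyncClient()
        request = datacatalog_v1beta1.ListEntryGroupsRequest(
            parent="projects/my-project/locations/us-central1",  # placeholder
        )
        # Awaiting the call returns an async pager that resolves pages lazily.
        pager = await client.list_entry_groups(request=request)
        async for entry_group in pager:
            print(entry_group.name)


    asyncio.run(list_groups())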
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1beta1.types import schema -from google.cloud.datacatalog_v1beta1.types import search -from google.cloud.datacatalog_v1beta1.types import table_spec -from google.cloud.datacatalog_v1beta1.types import tags -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.cloud.datacatalog_v1beta1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport -from .client import DataCatalogClient - - -class DataCatalogAsyncClient: - """Data Catalog API service allows clients to discover, - understand, and manage their data. 
- """ - - _client: DataCatalogClient - - DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT - - entry_path = staticmethod(DataCatalogClient.entry_path) - parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path) - entry_group_path = staticmethod(DataCatalogClient.entry_group_path) - parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path) - tag_path = staticmethod(DataCatalogClient.tag_path) - parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path) - tag_template_path = staticmethod(DataCatalogClient.tag_template_path) - parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path) - tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path) - parse_tag_template_field_path = staticmethod(DataCatalogClient.parse_tag_template_field_path) - tag_template_field_enum_value_path = staticmethod(DataCatalogClient.tag_template_field_enum_value_path) - parse_tag_template_field_enum_value_path = staticmethod(DataCatalogClient.parse_tag_template_field_enum_value_path) - common_billing_account_path = staticmethod(DataCatalogClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataCatalogClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataCatalogClient.common_folder_path) - parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path) - common_organization_path = staticmethod(DataCatalogClient.common_organization_path) - parse_common_organization_path = staticmethod(DataCatalogClient.parse_common_organization_path) - common_project_path = staticmethod(DataCatalogClient.common_project_path) - parse_common_project_path = staticmethod(DataCatalogClient.parse_common_project_path) - common_location_path = staticmethod(DataCatalogClient.common_location_path) - parse_common_location_path = staticmethod(DataCatalogClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataCatalogAsyncClient: The constructed client. - """ - return DataCatalogClient.from_service_account_info.__func__(DataCatalogAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataCatalogAsyncClient: The constructed client. - """ - return DataCatalogClient.from_service_account_file.__func__(DataCatalogAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataCatalogClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataCatalogTransport: - """Returns the transport used by the client instance. - - Returns: - DataCatalogTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DataCatalogClient).get_transport_class, type(DataCatalogClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, DataCatalogTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data catalog client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DataCatalogTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason.
- """ - self._client = DataCatalogClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def search_catalog(self, - request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, - *, - scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchCatalogAsyncPager: - r"""Searches Data Catalog for multiple resources like entries, tags - that match a query. - - This is a custom method - (https://cloud.google.com/apis/design/custom_methods) and does - not return the complete resource, only the resource identifier - and high level fields. Clients can subsequently call ``Get`` - methods. - - Note that Data Catalog search queries do not guarantee full - recall. Query results that match your query may not be returned, - even in subsequent result pages. Also note that results returned - (and not returned) can vary across repeated search queries. - - See `Data Catalog Search - Syntax `__ - for more information. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_search_catalog(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest, dict]]): - The request object. Request message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - scope (:class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope`): - Required. The scope of this search request. A ``scope`` - that has empty ``include_org_ids``, - ``include_project_ids`` AND false - ``include_gcp_public_datasets`` is considered invalid. - Data Catalog will return an error in such a case. - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query string in search query syntax. An - empty query string will result in all data assets (in - the specified scope) that the user has access to. Query - strings can be simple as "x" or more qualified as: - - - name:x - - column:x - - description:y - - Note: Query tokens need to have a minimum of 3 - characters for substring matching to work correctly. See - `Data Catalog Search - Syntax `__ - for more information. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogAsyncPager: - Response message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.SearchCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_catalog, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchCatalogAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry_group(self, - request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group_id: Optional[str] = None, - entry_group: Optional[datacatalog.EntryGroup] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""A maximum of 10,000 entry groups may be created per organization - across all locations. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_create_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = await client.create_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest, dict]]): - The request object. 
Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. - parent (:class:`str`): - Required. The name of the project this entry group is - in. Example: - - - projects/{project_id}/locations/{location} - - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (:class:`str`): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. - - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (:class:`google.cloud.datacatalog_v1beta1.types.EntryGroup`): - The entry group to create. Defaults - to an empty entry group. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group_id, entry_group]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group_id is not None: - request.entry_group_id = entry_group_id - if entry_group is not None: - request.entry_group = entry_group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entry_group(self, - request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[datacatalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Updates an EntryGroup. 
The user should enable the Data Catalog - API in the project identified by the ``entry_group.name`` - parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateEntryGroupRequest( - ) - - # Make the request - response = await client.update_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest, dict]]): - The request object. Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. - entry_group (:class:`google.cloud.datacatalog_v1beta1.types.EntryGroup`): - Required. The updated entry group. - "name" field must be set. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to - overwrite on an entry group. - If this parameter is absent or empty, - all modifiable fields are overwritten. - If such fields are non-required and - omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_entry_group(self, - request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - read_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_get_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest, dict]]): - The request object. Request message for - [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. - name (:class:`str`): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The fields to return. If not set or - empty, all fields are returned. - - This corresponds to the ``read_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, read_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if read_mask is not None: - request.read_mask = read_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry_group(self, - request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an EntryGroup. Only entry groups that do not contain - entries can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry_group(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest, dict]]): - The request object. Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. - name (:class:`str`): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteEntryGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_entry_groups(self, - request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsAsyncPager: - r"""Lists entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest, dict]]): - The request object. Request message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - parent (:class:`str`): - Required. The name of the location that contains the - entry groups, which can be provided in URL format. - Example: - - - projects/{project_id}/locations/{location} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsAsyncPager: - Response message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListEntryGroupsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_entry_groups, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListEntryGroupsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_entry(self, - request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_id: Optional[str] = None, - entry: Optional[datacatalog.Entry] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - A maximum of 100,000 entries may be created per entry group. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_create_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateEntryRequest, dict]]): - The request object. Request message for - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. - parent (:class:`str`): - Required. The name of the entry group this entry is in. - Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - Note that this Entry and its child resources may not - actually be stored in the location in this name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (:class:`str`): - Required. The id of the entry to - create. 
- - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (:class:`google.cloud.datacatalog_v1beta1.types.Entry`): - Required. The entry to create. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_id, entry]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_id is not None: - request.entry_id = entry_id - if entry is not None: - request.entry = entry - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_entry(self, - request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[datacatalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Updates an existing entry. Users should enable the Data Catalog - API in the project identified by the ``entry.name`` parameter - (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest, dict]]): - The request object. Request message for - [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. - entry (:class:`google.cloud.datacatalog_v1beta1.types.Entry`): - Required. The updated entry. The - "name" field must be set. - - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. - - The following fields are modifiable: - - - For entries with type ``DATA_STREAM``: - - - ``schema`` - - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_entry(self, - request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing entry. Only entries created through - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] - method can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest, dict]]): - The request object. Request message for - [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. - name (:class:`str`): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_entry(self, - request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_get_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetEntryRequest, dict]]): - The request object. Request message for - [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. - name (:class:`str`): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetEntryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def lookup_entry(self, - request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Get an entry by target resource name. This method - allows clients to use the resource name from the source - Google Cloud Platform service to get the Data Catalog - Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_lookup_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.LookupEntryRequest, dict]]): - The request object. Request message for - [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. 
- request = datacatalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.lookup_entry, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_entries(self, - request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesAsyncPager: - r"""Lists entries. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_list_entries(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListEntriesRequest, dict]]): - The request object. Request message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - parent (:class:`str`): - Required. The name of the entry group that contains the - entries, which can be provided in URL format. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesAsyncPager: - Response message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListEntriesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_entries,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListEntriesAsyncPager(
- method=rpc,
- request=request,
- response=response,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_tag_template(self,
- request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- tag_template_id: Optional[str] = None,
- tag_template: Optional[tags.TagTemplate] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> tags.TagTemplate:
- r"""Creates a tag template. The user should enable the Data Catalog
- API in the project identified by the ``parent`` parameter (see
- `Data Catalog Resource
- Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
- for more information).
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import datacatalog_v1beta1
-
- async def sample_create_tag_template():
- # Create a client
- client = datacatalog_v1beta1.DataCatalogAsyncClient()
-
- # Initialize request argument(s)
- request = datacatalog_v1beta1.CreateTagTemplateRequest(
- parent="parent_value",
- tag_template_id="tag_template_id_value",
- )
-
- # Make the request
- response = await client.create_tag_template(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest, dict]]):
- The request object. Request message for
- [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate].
- parent (:class:`str`):
- Required. The name of the project and the template
- location
- [region](https://cloud.google.com/data-catalog/docs/concepts/regions).
-
- Example:
-
- - projects/{project_id}/locations/us-central1
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- tag_template_id (:class:`str`):
- Required. The id of the tag template
- to create.
-
- This corresponds to the ``tag_template_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- tag_template (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplate`):
- Required. The tag template to create.
- This corresponds to the ``tag_template`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_id, tag_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag_template_id is not None: - request.tag_template_id = tag_template_id - if tag_template is not None: - request.tag_template = tag_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_tag_template(self, - request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Gets a tag template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_get_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest, dict]]): - The request object. Request message for - [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. - name (:class:`str`): - Required. 
The name of the tag template. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.GetTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag_template(self, - request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, - *, - tag_template: Optional[tags.TagTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Updates a tag template. This method cannot be used to update the - fields of a template. The tag template fields are represented as - separate resources and should be updated using their own - create/update/delete methods. Users should enable the Data - Catalog API in the project identified by the - ``tag_template.name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTagTemplateRequest( - ) - - # Make the request - response = await client.update_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest, dict]]): - The request object. Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. - tag_template (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplate`): - Required. The template to update. The - "name" field must be set. - - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Names of fields whose values to overwrite on a tag - template. Currently, only ``display_name`` can be - overwritten. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag_template is not None: - request.tag_template = tag_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_tag_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag_template.name", request.tag_template.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag_template(self, - request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag template and all tags using the template. Users - should enable the Data Catalog API in the project identified by - the ``name`` parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest, dict]]): - The request object. Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. - name (:class:`str`): - Required. The name of the tag template to delete. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (:class:`bool`): - Required. Currently, this field must always be set to - ``true``. This confirms the deletion of any possible - tags using this template. ``force = false`` will be - supported in the future. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None:
- request.name = name
- if force is not None:
- request.force = force
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_tag_template,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def create_tag_template_field(self,
- request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- tag_template_field_id: Optional[str] = None,
- tag_template_field: Optional[tags.TagTemplateField] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> tags.TagTemplateField:
- r"""Creates a field in a tag template. The user should enable the
- Data Catalog API in the project identified by the ``parent``
- parameter (see `Data Catalog Resource
- Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
- for more information).
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import datacatalog_v1beta1
-
- async def sample_create_tag_template_field():
- # Create a client
- client = datacatalog_v1beta1.DataCatalogAsyncClient()
-
- # Initialize request argument(s)
- tag_template_field = datacatalog_v1beta1.TagTemplateField()
- tag_template_field.type_.primitive_type = "TIMESTAMP"
-
- request = datacatalog_v1beta1.CreateTagTemplateFieldRequest(
- parent="parent_value",
- tag_template_field_id="tag_template_field_id_value",
- tag_template_field=tag_template_field,
- )
-
- # Make the request
- response = await client.create_tag_template_field(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest, dict]]):
- The request object. Request message for
- [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField].
- parent (:class:`str`):
- Required. The name of the project and the template
- location
- `region <https://cloud.google.com/data-catalog/docs/concepts/regions>`__.
-
- Example:
-
- - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id}
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- tag_template_field_id (:class:`str`):
- Required. The ID of the tag template field to create.
- Field ids can contain letters (both uppercase and
- lowercase), numbers (0-9), underscores (_) and dashes
- (-). Field IDs must be at least 1 character long and at
- most 128 characters long. Field IDs must also be unique
- within their template.
-
- This corresponds to the ``tag_template_field_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- tag_template_field (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplateField`): - Required. The tag template field to - create. - - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag_template_field_id is not None: - request.tag_template_field_id = tag_template_field_id - if tag_template_field is not None: - request.tag_template_field = tag_template_field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag_template_field(self, - request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Updates a field in a tag template. This method cannot be used to - update the field type. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.update_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest, dict]]): - The request object. Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. - name (:class:`str`): - Required. The name of the tag template field. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (:class:`google.cloud.datacatalog_v1beta1.types.TagTemplateField`): - Required. The template to update. - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Names of fields whose values to overwrite on - an individual field of a tag template. The following - fields are modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied with one exception: when updating an enum type, - the provided values are merged with the existing values. - Therefore, enum values can only be added, existing enum - values cannot be deleted or renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, tag_template_field, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None:
- request.name = name
- if tag_template_field is not None:
- request.tag_template_field = tag_template_field
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_tag_template_field,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def rename_tag_template_field(self,
- request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- new_tag_template_field_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> tags.TagTemplateField:
- r"""Renames a field in a tag template. The user should enable the
- Data Catalog API in the project identified by the ``name``
- parameter (see `Data Catalog Resource
- Project <https://cloud.google.com/data-catalog/docs/concepts/resource-project>`__
- for more information).
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import datacatalog_v1beta1
-
- async def sample_rename_tag_template_field():
- # Create a client
- client = datacatalog_v1beta1.DataCatalogAsyncClient()
-
- # Initialize request argument(s)
- request = datacatalog_v1beta1.RenameTagTemplateFieldRequest(
- name="name_value",
- new_tag_template_field_id="new_tag_template_field_id_value",
- )
-
- # Make the request
- response = await client.rename_tag_template_field(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest, dict]]):
- The request object. Request message for
- [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField].
- name (:class:`str`):
- Required. The name of the tag template field. Example:
-
- - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- new_tag_template_field_id (:class:`str`):
- Required. The new ID of this tag template field. For
- example, ``my_new_field``.
-
- This corresponds to the ``new_tag_template_field_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_tag_template_field_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.RenameTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_tag_template_field_id is not None: - request.new_tag_template_field_id = new_tag_template_field_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rename_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rename_tag_template_field_enum_value(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, - *, - name: Optional[str] = None, - new_enum_value_display_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames an enum value in a tag template. The enum - values have to be unique within one enum field. Thus, an - enum value cannot be renamed with a name used in any - other enum value within the same enum field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = await client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest, dict]]): - The request object. Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - name (:class:`str`): - Required. The name of the enum field value. 
Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_enum_value_display_name (:class:`str`): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. - - This corresponds to the ``new_enum_value_display_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_enum_value_display_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_enum_value_display_name is not None: - request.new_enum_value_display_name = new_enum_value_display_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rename_tag_template_field_enum_value, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag_template_field(self, - request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a field in a tag template and all uses of that field. - Users should enable the Data Catalog API in the project - identified by the ``name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template_field(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest, dict]]): - The request object. Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. - name (:class:`str`): - Required. The name of the tag template field to delete. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (:class:`bool`): - Required. Currently, this field must always be set to - ``true``. This confirms the deletion of this field from - any tags using this field. ``force = false`` will be - supported in the future. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagTemplateFieldRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_tag_template_field, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_tag(self, - request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag: Optional[tags.Tag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Creates a tag on an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. 
Note: The - project identified by the ``parent`` parameter for the - `tag `__ - and the `tag - template `__ - used to create the tag must be from the same organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_create_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = await client.create_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTagRequest, dict]]): - The request object. Request message for - [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. - parent (:class:`str`): - Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - Note that this Tag and its child resources may not - actually be stored in the location in this name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag (:class:`google.cloud.datacatalog_v1beta1.types.Tag`): - Required. The tag to create. - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Tag: - Tags are used to attach custom metadata to Data Catalog resources. Tags - conform to the specifications within their tag - template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.CreateTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag is not None: - request.tag = tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_tag(self, - request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, - *, - tag: Optional[tags.Tag] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Updates an existing tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = await client.update_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTagRequest, dict]]): - The request object. Request message for - [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. - tag (:class:`google.cloud.datacatalog_v1beta1.types.Tag`): - Required. The updated tag. The "name" - field must be set. - - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Note: Currently, this parameter can only take - ``"fields"`` as value. - - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the - name ``fields``. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Tag: - Tags are used to attach custom metadata to Data Catalog resources. Tags - conform to the specifications within their tag - template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. 
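# [Editor's note] A minimal sketch of the ``update_mask`` convention documented
# above for update_tag, assuming an already-fetched Tag; per the docstring the
# only modifiable path is currently "fields". The helper name is hypothetical.
from google.cloud import datacatalog_v1beta1
from google.protobuf import field_mask_pb2

def build_update_tag_request(existing_tag: datacatalog_v1beta1.Tag) -> datacatalog_v1beta1.UpdateTagRequest:
    # Overwrite only the tag's fields; leave all other attributes untouched.
    return datacatalog_v1beta1.UpdateTagRequest(
        tag=existing_tag,
        update_mask=field_mask_pb2.FieldMask(paths=["fields"]),
    )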
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.UpdateTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag is not None: - request.tag = tag - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag.name", request.tag.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_tag(self, - request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_tag(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTagRequest, dict]]): - The request object. Request message for - [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. - name (:class:`str`): - Required. The name of the tag to delete. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
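# [Editor's note] The check below enforces the calling convention repeated in
# the docstrings: pass either a request object or the flattened ``name``
# argument, never both (mixing them raises ValueError). A minimal sketch with a
# placeholder tag name and a hypothetical helper:
from google.cloud import datacatalog_v1beta1

async def remove_tag(client: datacatalog_v1beta1.DataCatalogAsyncClient, tag_name: str) -> None:
    # Flattened form; the equivalent request-object form would be
    # client.delete_tag(request=datacatalog_v1beta1.DeleteTagRequest(name=tag_name)).
    await client.delete_tag(name=tag_name)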
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.DeleteTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_tags(self, - request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsAsyncPager: - r"""Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. The - [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the - response are lowercased. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_list_tags(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListTagsRequest, dict]]): - The request object. Request message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. - parent (:class:`str`): - Required. The name of the Data Catalog resource to list - the tags of. The resource could be an - [Entry][google.cloud.datacatalog.v1beta1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. - - Examples: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsAsyncPager: - Response message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = datacatalog.ListTagsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_tags, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTagsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a resource. Replaces any - existing policy. Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entries.setIamPolicy`` to set policies on - entries. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. 
- resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. A ``NOT_FOUND`` - error is returned if the resource does not exist. An empty - policy is returned if the resource exists but does not have a - policy set on it. - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entries.getIamPolicy`` to get policies on - entries. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. 
- Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permissions on a resource. 
If the resource - does not exist, an empty set of permissions is returned (We - don't return a ``NOT_FOUND`` error). - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - A caller is not required to have Google IAM permission to make - this request. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
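# [Editor's note] A minimal sketch tying the IAM helpers above together,
# assuming an existing entry group; the project, location, and entry group id
# are placeholders, and the resource name is built with the path helper defined
# on DataCatalogClient later in this patch.
from google.cloud import datacatalog_v1beta1
from google.iam.v1 import iam_policy_pb2  # type: ignore

async def check_entry_group_access(client: datacatalog_v1beta1.DataCatalogAsyncClient) -> None:
    resource = datacatalog_v1beta1.DataCatalogClient.entry_group_path(
        "my-project", "us-central1", "my_entry_group")
    # Ask which of the listed permissions the caller actually holds.
    response = await client.test_iam_permissions(
        iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=["datacatalog.entryGroups.getIamPolicy"],
        )
    )
    print(list(response.permissions))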
- return response - - async def __aenter__(self) -> "DataCatalogAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataCatalogAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py deleted file mode 100644 index b7bd21b66698..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ /dev/null @@ -1,3904 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1beta1.types import schema -from google.cloud.datacatalog_v1beta1.types import search -from google.cloud.datacatalog_v1beta1.types import table_spec -from google.cloud.datacatalog_v1beta1.types import tags -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.cloud.datacatalog_v1beta1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataCatalogGrpcTransport -from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport - - -class DataCatalogClientMeta(type): - """Metaclass for the DataCatalog client. 
-
- This provides class-level methods for building and retrieving
- support objects (e.g. transport) without polluting the client instance
- objects.
- """
- _transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]]
- _transport_registry["grpc"] = DataCatalogGrpcTransport
- _transport_registry["grpc_asyncio"] = DataCatalogGrpcAsyncIOTransport
-
- def get_transport_class(cls,
- label: Optional[str] = None,
- ) -> Type[DataCatalogTransport]:
- """Returns an appropriate transport class.
-
- Args:
- label: The name of the desired transport. If none is
- provided, then the first transport in the registry is used.
-
- Returns:
- The transport class to use.
- """
- # If a specific transport is requested, return that one.
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class DataCatalogClient(metaclass=DataCatalogClientMeta):
- """Data Catalog API service allows clients to discover,
- understand, and manage their data.
- """
-
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- DEFAULT_ENDPOINT = "datacatalog.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
- DEFAULT_ENDPOINT
- )
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- DataCatalogClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- DataCatalogClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(
- filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> DataCatalogTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- DataCatalogTransport: The transport used by the client
- instance.
- """
- return self._transport
-
- @staticmethod
- def entry_path(project: str,location: str,entry_group: str,entry: str,) -> str:
- """Returns a fully-qualified entry string."""
- return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, )
-
- @staticmethod
- def parse_entry_path(path: str) -> Dict[str,str]:
- """Parses an entry path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)/entries/(?P<entry>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def entry_group_path(project: str,location: str,entry_group: str,) -> str:
- """Returns a fully-qualified entry_group string."""
- return "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, )
-
- @staticmethod
- def parse_entry_group_path(path: str) -> Dict[str,str]:
- """Parses an entry_group path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def tag_path(project: str,location: str,entry_group: str,entry: str,tag: str,) -> str:
- """Returns a fully-qualified tag string."""
- return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, )
-
- @staticmethod
- def parse_tag_path(path: str) -> Dict[str,str]:
- """Parses a tag path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/entryGroups/(?P<entry_group>.+?)/entries/(?P<entry>.+?)/tags/(?P<tag>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def tag_template_path(project: str,location: str,tag_template: str,) -> str:
- """Returns a fully-qualified tag_template string."""
- return "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, )
-
- @staticmethod
- def parse_tag_template_path(path: str) -> Dict[str,str]:
- """Parses a tag_template path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def tag_template_field_path(project: str,location: str,tag_template: str,field: str,) -> str:
- """Returns a fully-qualified tag_template_field string."""
- return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, )
-
- @staticmethod
- def parse_tag_template_field_path(path: str) -> Dict[str,str]:
- """Parses a tag_template_field path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<field>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def tag_template_field_enum_value_path(project: str,location: str,tag_template: str,tag_template_field_id: str,enum_value_display_name: str,) -> str:
- """Returns a fully-qualified tag_template_field_enum_value string."""
- return "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, )
-
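# [Editor's note] A small usage sketch of the path helpers above (all values are
# placeholders); they only build and parse resource names and make no API calls.
from google.cloud import datacatalog_v1beta1

entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path(
    "my-project", "us-central1", "my_entry_group", "my_entry")
# -> "projects/my-project/locations/us-central1/entryGroups/my_entry_group/entries/my_entry"
segments = datacatalog_v1beta1.DataCatalogClient.parse_entry_path(entry_name)
# -> {"project": "my-project", "location": "us-central1",
#     "entry_group": "my_entry_group", "entry": "my_entry"}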
- @staticmethod
- def parse_tag_template_field_enum_value_path(path: str) -> Dict[str,str]:
- """Parses a tag_template_field_enum_value path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/tagTemplates/(?P<tag_template>.+?)/fields/(?P<tag_template_field_id>.+?)/enumValues/(?P<enum_value_display_name>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_billing_account_path(billing_account: str, ) -> str:
- """Returns a fully-qualified billing_account string."""
- return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
- @staticmethod
- def parse_common_billing_account_path(path: str) -> Dict[str,str]:
- """Parse a billing_account path into its component segments."""
- m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_folder_path(folder: str, ) -> str:
- """Returns a fully-qualified folder string."""
- return "folders/{folder}".format(folder=folder, )
-
- @staticmethod
- def parse_common_folder_path(path: str) -> Dict[str,str]:
- """Parse a folder path into its component segments."""
- m = re.match(r"^folders/(?P<folder>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_organization_path(organization: str, ) -> str:
- """Returns a fully-qualified organization string."""
- return "organizations/{organization}".format(organization=organization, )
-
- @staticmethod
- def parse_common_organization_path(path: str) -> Dict[str,str]:
- """Parse an organization path into its component segments."""
- m = re.match(r"^organizations/(?P<organization>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_project_path(project: str, ) -> str:
- """Returns a fully-qualified project string."""
- return "projects/{project}".format(project=project, )
-
- @staticmethod
- def parse_common_project_path(path: str) -> Dict[str,str]:
- """Parse a project path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_location_path(project: str, location: str, ) -> str:
- """Returns a fully-qualified location string."""
- return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
- @staticmethod
- def parse_common_location_path(path: str) -> Dict[str,str]:
- """Parse a location path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
- @classmethod
- def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
- """Return the API endpoint and client cert source for mutual TLS.
-
- The client cert source is determined in the following order:
- (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
- client cert source is None.
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the
- default client cert source exists, use the default one; otherwise the client cert
- source is None.
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
- use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
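# [Editor's note] A minimal sketch of overriding the endpoint through client
# options, as described above; the regional endpoint value is a placeholder and
# should be replaced with the endpoint appropriate for your location.
from google.api_core.client_options import ClientOptions
from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient(
    client_options=ClientOptions(api_endpoint="us-central1-datacatalog.googleapis.com"))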
- - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataCatalogTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data catalog client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, DataCatalogTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, DataCatalogTransport): - # transport is a DataCatalogTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def search_catalog(self, - request: Optional[Union[datacatalog.SearchCatalogRequest, dict]] = None, - *, - scope: Optional[datacatalog.SearchCatalogRequest.Scope] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.SearchCatalogPager: - r"""Searches Data Catalog for multiple resources like entries, tags - that match a query. - - This is a custom method - (https://cloud.google.com/apis/design/custom_methods) and does - not return the complete resource, only the resource identifier - and high level fields. Clients can subsequently call ``Get`` - methods. - - Note that Data Catalog search queries do not guarantee full - recall. Query results that match your query may not be returned, - even in subsequent result pages. Also note that results returned - (and not returned) can vary across repeated search queries. - - See `Data Catalog Search - Syntax `__ - for more information. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_search_catalog(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest, dict]): - The request object. Request message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - scope (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope): - Required. The scope of this search request. A ``scope`` - that has empty ``include_org_ids``, - ``include_project_ids`` AND false - ``include_gcp_public_datasets`` is considered invalid. - Data Catalog will return an error in such a case. - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Optional. The query string in search query syntax. An - empty query string will result in all data assets (in - the specified scope) that the user has access to. Query - strings can be simple as "x" or more qualified as: - - - name:x - - column:x - - description:y - - Note: Query tokens need to have a minimum of 3 - characters for substring matching to work correctly. See - `Data Catalog Search - Syntax `__ - for more information. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogPager: - Response message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.SearchCatalogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.SearchCatalogRequest): - request = datacatalog.SearchCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_catalog] - - # Send the request. 
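# [Editor's note] A minimal sketch of the scope requirement described above: at
# least one of ``include_org_ids``/``include_project_ids`` (or the public
# datasets flag) must be populated or the request is rejected. The project id
# and helper name are placeholders.
from google.cloud import datacatalog_v1beta1

def search_my_project(client: datacatalog_v1beta1.DataCatalogClient):
    scope = datacatalog_v1beta1.SearchCatalogRequest.Scope(
        include_project_ids=["my-project"])
    # Returns a pager; iterating it fetches additional pages automatically.
    return client.search_catalog(scope=scope, query="name:x")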
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchCatalogPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry_group(self, - request: Optional[Union[datacatalog.CreateEntryGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_group_id: Optional[str] = None, - entry_group: Optional[datacatalog.EntryGroup] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""A maximum of 10,000 entry groups may be created per organization - across all locations. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = client.create_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest, dict]): - The request object. Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. - parent (str): - Required. The name of the project this entry group is - in. Example: - - - projects/{project_id}/locations/{location} - - Note that this EntryGroup and its child resources may - not actually be stored in the location in this name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group_id (str): - Required. The id of the entry group - to create. The id must begin with a - letter or underscore, contain only - English letters, numbers and - underscores, and be at most 64 - characters. - - This corresponds to the ``entry_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): - The entry group to create. Defaults - to an empty entry group. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_group_id, entry_group]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateEntryGroupRequest): - request = datacatalog.CreateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_group_id is not None: - request.entry_group_id = entry_group_id - if entry_group is not None: - request.entry_group = entry_group - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_entry_group(self, - request: Optional[Union[datacatalog.UpdateEntryGroupRequest, dict]] = None, - *, - entry_group: Optional[datacatalog.EntryGroup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Updates an EntryGroup. The user should enable the Data Catalog - API in the project identified by the ``entry_group.name`` - parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateEntryGroupRequest( - ) - - # Make the request - response = client.update_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest, dict]): - The request object. Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. 
- entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): - Required. The updated entry group. - "name" field must be set. - - This corresponds to the ``entry_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to - overwrite on an entry group. - If this parameter is absent or empty, - all modifiable fields are overwritten. - If such fields are non-required and - omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry_group, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateEntryGroupRequest): - request = datacatalog.UpdateEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry_group is not None: - request.entry_group = entry_group - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry_group.name", request.entry_group.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_entry_group(self, - request: Optional[Union[datacatalog.GetEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - read_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.EntryGroup: - r"""Gets an EntryGroup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_get_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest, dict]): - The request object. Request message for - [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. - name (str): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return. If not set or - empty, all fields are returned. - - This corresponds to the ``read_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.EntryGroup: - EntryGroup Metadata. - An EntryGroup resource represents a logical grouping - of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] - resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, read_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetEntryGroupRequest): - request = datacatalog.GetEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if read_mask is not None: - request.read_mask = read_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def delete_entry_group(self, - request: Optional[Union[datacatalog.DeleteEntryGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an EntryGroup. Only entry groups that do not contain - entries can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - client.delete_entry_group(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest, dict]): - The request object. Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. - name (str): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteEntryGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteEntryGroupRequest): - request = datacatalog.DeleteEntryGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
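# Note: the ``gapic_v1.method.DEFAULT`` sentinels mean the retry and timeout
# configured for this method on the transport are applied; passing explicit
# ``retry``/``timeout`` values overrides them for this call only.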
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_entry_groups(self, - request: Optional[Union[datacatalog.ListEntryGroupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntryGroupsPager: - r"""Lists entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest, dict]): - The request object. Request message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - parent (str): - Required. The name of the location that contains the - entry groups, which can be provided in URL format. - Example: - - - projects/{project_id}/locations/{location} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsPager: - Response message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListEntryGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListEntryGroupsRequest): - request = datacatalog.ListEntryGroupsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entry_groups] - - # Certain fields should be provided within the metadata header; - # add these here. 
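# (The routing header is sent as ``x-goog-request-params`` so the backend can
# route the call by resource; for parent="projects/my-project/locations/us-central1"
# the header value is roughly "parent=projects/my-project/...". Values shown
# are illustrative placeholders.)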
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntryGroupsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_entry(self, - request: Optional[Union[datacatalog.CreateEntryRequest, dict]] = None, - *, - parent: Optional[str] = None, - entry_id: Optional[str] = None, - entry: Optional[datacatalog.Entry] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - A maximum of 100,000 entries may be created per entry group. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateEntryRequest, dict]): - The request object. Request message for - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. - parent (str): - Required. The name of the entry group this entry is in. - Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - Note that this Entry and its child resources may not - actually be stored in the location in this name. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry_id (str): - Required. The id of the entry to - create. - - This corresponds to the ``entry_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - entry (google.cloud.datacatalog_v1beta1.types.Entry): - Required. The entry to create. - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
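The flattened form mirrors the request fields above; a minimal sketch that
creates a ``FILESET`` entry (resource names and file patterns are
placeholders):

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    entry = datacatalog_v1beta1.Entry()
    entry.type_ = "FILESET"
    entry.display_name = "My fileset"
    entry.gcs_fileset_spec.file_patterns = ["gs://my-bucket/*.csv"]

    created = client.create_entry(
        parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
        entry_id="my_entry",
        entry=entry,
    )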
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, entry_id, entry]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateEntryRequest): - request = datacatalog.CreateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if entry_id is not None: - request.entry_id = entry_id - if entry is not None: - request.entry = entry - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_entry(self, - request: Optional[Union[datacatalog.UpdateEntryRequest, dict]] = None, - *, - entry: Optional[datacatalog.Entry] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Updates an existing entry. Users should enable the Data Catalog - API in the project identified by the ``entry.name`` parameter - (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest, dict]): - The request object. Request message for - [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. - entry (google.cloud.datacatalog_v1beta1.types.Entry): - Required. The updated entry. The - "name" field must be set. - - This corresponds to the ``entry`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied. - - The following fields are modifiable: - - - For entries with type ``DATA_STREAM``: - - - ``schema`` - - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([entry, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateEntryRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateEntryRequest): - request = datacatalog.UpdateEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if entry is not None: - request.entry = entry - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("entry.name", request.entry.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_entry(self, - request: Optional[Union[datacatalog.DeleteEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing entry. Only entries created through - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] - method can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - client.delete_entry(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest, dict]): - The request object. Request message for - [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. - name (str): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
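# For example, client.delete_entry(request=req, name="...") is rejected by
# the check below; callers pass either a request object or the flattened
# ``name`` argument, never both.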
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteEntryRequest): - request = datacatalog.DeleteEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_entry(self, - request: Optional[Union[datacatalog.GetEntryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Gets an entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_get_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.GetEntryRequest, dict]): - The request object. Request message for - [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. - name (str): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. 
An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetEntryRequest): - request = datacatalog.GetEntryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_entry] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def lookup_entry(self, - request: Optional[Union[datacatalog.LookupEntryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> datacatalog.Entry: - r"""Get an entry by target resource name. This method - allows clients to use the resource name from the source - Google Cloud Platform service to get the Data Catalog - Entry. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_lookup_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.LookupEntryRequest, dict]): - The request object. Request message for - [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Entry: - Entry Metadata. - A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery - dataset or a Pub/Sub topic), or outside of Google - Cloud Platform. 
Clients can use the linked_resource - field in the Entry resource to refer to the original - resource ID of the source system. - - An Entry resource contains resource details, such as - its schema. An Entry can also be used to attach - flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.LookupEntryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.LookupEntryRequest): - request = datacatalog.LookupEntryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.lookup_entry] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_entries(self, - request: Optional[Union[datacatalog.ListEntriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListEntriesPager: - r"""Lists entries. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_list_entries(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ListEntriesRequest, dict]): - The request object. Request message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - parent (str): - Required. The name of the entry group that contains the - entries, which can be provided in URL format. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesPager: - Response message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
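# Usage note (illustrative): the returned ListEntriesPager can be iterated
# directly and resolves additional pages transparently, e.g.
#
#     for entry in client.list_entries(
#             parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group"):
#         print(entry.name)
#
# or page by page via its ``pages`` property; the parent value above is a
# placeholder.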
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListEntriesRequest): - request = datacatalog.ListEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_entries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListEntriesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_tag_template(self, - request: Optional[Union[datacatalog.CreateTagTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_id: Optional[str] = None, - tag_template: Optional[tags.TagTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Creates a tag template. The user should enable the Data Catalog - API in the project identified by the ``parent`` parameter (see - `Data Catalog Resource - Project `__ - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = client.create_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest, dict]): - The request object. Request message for - [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. - parent (str): - Required. The name of the project and the template - location - [region](https://cloud.google.com/data-catalog/docs/concepts/regions. - - Example: - - - projects/{project_id}/locations/us-central1 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_id (str): - Required. 
The id of the tag template - to create. - - This corresponds to the ``tag_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): - Required. The tag template to create. - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_id, tag_template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateTagTemplateRequest): - request = datacatalog.CreateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag_template_id is not None: - request.tag_template_id = tag_template_id - if tag_template is not None: - request.tag_template = tag_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_tag_template(self, - request: Optional[Union[datacatalog.GetTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Gets a tag template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_get_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest, dict]): - The request object. Request message for - [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. - name (str): - Required. The name of the tag template. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.GetTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.GetTagTemplateRequest): - request = datacatalog.GetTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def update_tag_template(self, - request: Optional[Union[datacatalog.UpdateTagTemplateRequest, dict]] = None, - *, - tag_template: Optional[tags.TagTemplate] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplate: - r"""Updates a tag template. This method cannot be used to update the - fields of a template. The tag template fields are represented as - separate resources and should be updated using their own - create/update/delete methods. Users should enable the Data - Catalog API in the project identified by the - ``tag_template.name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTagTemplateRequest( - ) - - # Make the request - response = client.update_tag_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest, dict]): - The request object. Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. - tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): - Required. The template to update. The - "name" field must be set. - - This corresponds to the ``tag_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag - template. Currently, only ``display_name`` can be - overwritten. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplate: - A tag template defines a tag, which can have one or more typed fields. - The template is used to create and attach the tag to - Google Cloud resources. [Tag template - roles](\ https://cloud.google.com/iam/docs/understanding-roles#data-catalog-roles) - provide permissions to create, edit, and use the - template. See, for example, the [TagTemplate - User](\ https://cloud.google.com/data-catalog/docs/how-to/template-user) - role, which includes permission to use the tag - template to tag resources. 
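A minimal sketch of the flattened form (the template name is a placeholder);
per the ``update_mask`` description above, only ``display_name`` can
currently be overwritten:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.protobuf import field_mask_pb2

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder resource name; substitute an existing tag template.
    tag_template = datacatalog_v1beta1.TagTemplate(
        name="projects/my-project/locations/us-central1/tagTemplates/my_template",
        display_name="My template",
    )
    updated = client.update_tag_template(
        tag_template=tag_template,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )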
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagTemplateRequest): - request = datacatalog.UpdateTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag_template is not None: - request.tag_template = tag_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag_template.name", request.tag_template.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag_template(self, - request: Optional[Union[datacatalog.DeleteTagTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag template and all tags using the template. Users - should enable the Data Catalog API in the project identified by - the ``name`` parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest, dict]): - The request object. Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. - name (str): - Required. The name of the tag template to delete. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (bool): - Required. 
Currently, this field must always be set to - ``true``. This confirms the deletion of any possible - tags using this template. ``force = false`` will be - supported in the future. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagTemplateRequest): - request = datacatalog.DeleteTagTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_tag_template_field(self, - request: Optional[Union[datacatalog.CreateTagTemplateFieldRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag_template_field_id: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Creates a field in a tag template. The user should enable the - Data Catalog API in the project identified by the ``parent`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.create_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest, dict]): - The request object. Request message for - [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField]. - parent (str): - Required. The name of the project and the template - location - `region `__. - - Example: - - - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field_id (str): - Required. The ID of the tag template field to create. - Field ids can contain letters (both uppercase and - lowercase), numbers (0-9), underscores (_) and dashes - (-). Field IDs must be at least 1 character long and at - most 128 characters long. Field IDs must also be unique - within their template. - - This corresponds to the ``tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): - Required. The tag template field to - create. - - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag_template_field_id, tag_template_field]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateTagTemplateFieldRequest): - request = datacatalog.CreateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
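Beyond the request-object sample above, the flattened ``parent``, ``tag_template_field_id``, and ``tag_template_field`` keyword arguments described in the docstring can be used directly. A minimal sketch, assuming the ``google-cloud-datacatalog`` package is installed, default credentials are configured, and the project/location/template IDs are placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder name of an existing tag template.
    parent = "projects/my-project/locations/us-central1/tagTemplates/my_template"

    # Field IDs may contain letters, numbers, underscores and dashes, and must
    # be unique within the template (see the docstring above).
    field = datacatalog_v1beta1.TagTemplateField()
    field.display_name = "Created at"
    field.type_.primitive_type = "TIMESTAMP"

    response = client.create_tag_template_field(
        parent=parent,
        tag_template_field_id="created_at",
        tag_template_field=field,
    )
    print(response.name)

Note that the flattened arguments and the ``request`` object are mutually exclusive; mixing them raises ``ValueError``.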
- if parent is not None: - request.parent = parent - if tag_template_field_id is not None: - request.tag_template_field_id = tag_template_field_id - if tag_template_field is not None: - request.tag_template_field = tag_template_field - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_tag_template_field(self, - request: Optional[Union[datacatalog.UpdateTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - tag_template_field: Optional[tags.TagTemplateField] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Updates a field in a tag template. This method cannot be used to - update the field type. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.update_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest, dict]): - The request object. Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. - name (str): - Required. The name of the tag template field. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): - Required. The template to update. - This corresponds to the ``tag_template_field`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Names of fields whose values to overwrite on - an individual field of a tag template. 
The following - fields are modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable - fields are overwritten. If such fields are non-required - and omitted in the request body, their values are - emptied with one exception: when updating an enum type, - the provided values are merged with the existing values. - Therefore, enum values can only be added, existing enum - values cannot be deleted or renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, tag_template_field, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagTemplateFieldRequest): - request = datacatalog.UpdateTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if tag_template_field is not None: - request.tag_template_field = tag_template_field - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_tag_template_field(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - new_tag_template_field_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames a field in a tag template. The user should enable the - Data Catalog API in the project identified by the ``name`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest, dict]): - The request object. Request message for - [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. - name (str): - Required. The name of the tag template. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_tag_template_field_id (str): - Required. The new ID of this tag template field. For - example, ``my_new_field``. - - This corresponds to the ``new_tag_template_field_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_tag_template_field_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.RenameTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.RenameTagTemplateFieldRequest): - request = datacatalog.RenameTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_tag_template_field_id is not None: - request.new_tag_template_field_id = new_tag_template_field_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
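The same flattened calling convention applies to ``rename_tag_template_field``. A minimal sketch mirroring the resource-name format and the ``my_new_field`` example from the docstring (project, location, and IDs are placeholders; credentials are assumed to be configured):

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder field resource name, following the format in the docstring.
    name = (
        "projects/my-project/locations/us-central1"
        "/tagTemplates/my_template/fields/created_at"
    )

    renamed = client.rename_tag_template_field(
        name=name,
        new_tag_template_field_id="my_new_field",
    )
    print(renamed.name)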
- return response - - def rename_tag_template_field_enum_value(self, - request: Optional[Union[datacatalog.RenameTagTemplateFieldEnumValueRequest, dict]] = None, - *, - name: Optional[str] = None, - new_enum_value_display_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.TagTemplateField: - r"""Renames an enum value in a tag template. The enum - values have to be unique within one enum field. Thus, an - enum value cannot be renamed with a name used in any - other enum value within the same enum field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest, dict]): - The request object. Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - name (str): - Required. The name of the enum field value. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_enum_value_display_name (str): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. - - This corresponds to the ``new_enum_value_display_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.TagTemplateField: - The template for an individual field - within a tag template. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_enum_value_display_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.RenameTagTemplateFieldEnumValueRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
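Each flattened method runs the same quick check shown here: supplying a ``request`` object together with any flattened field argument raises ``ValueError`` before any RPC is made. A minimal sketch with placeholder names:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    name = (
        "projects/my-project/locations/us-central1"
        "/tagTemplates/my_template/fields/status/enumValues/OLD_VALUE"
    )
    request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest(
        name=name,
        new_enum_value_display_name="NEW_VALUE",
    )

    try:
        # Mixing `request` with a flattened argument is rejected locally.
        client.rename_tag_template_field_enum_value(
            request=request,
            new_enum_value_display_name="NEW_VALUE",
        )
    except ValueError as exc:
        print(exc)

    # Either form on its own is accepted.
    client.rename_tag_template_field_enum_value(request=request)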
- if not isinstance(request, datacatalog.RenameTagTemplateFieldEnumValueRequest): - request = datacatalog.RenameTagTemplateFieldEnumValueRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_enum_value_display_name is not None: - request.new_enum_value_display_name = new_enum_value_display_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_tag_template_field_enum_value] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag_template_field(self, - request: Optional[Union[datacatalog.DeleteTagTemplateFieldRequest, dict]] = None, - *, - name: Optional[str] = None, - force: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a field in a tag template and all uses of that field. - Users should enable the Data Catalog API in the project - identified by the ``name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template_field(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest, dict]): - The request object. Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. - name (str): - Required. The name of the tag template field to delete. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - force (bool): - Required. Currently, this field must always be set to - ``true``. This confirms the deletion of this field from - any tags using this field. ``force = false`` will be - supported in the future. - - This corresponds to the ``force`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, force]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagTemplateFieldRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagTemplateFieldRequest): - request = datacatalog.DeleteTagTemplateFieldRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if force is not None: - request.force = force - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag_template_field] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_tag(self, - request: Optional[Union[datacatalog.CreateTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - tag: Optional[tags.Tag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Creates a tag on an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The - project identified by the ``parent`` parameter for the - `tag `__ - and the `tag - template `__ - used to create the tag must be from the same organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = client.create_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateTagRequest, dict]): - The request object. Request message for - [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. - parent (str): - Required. The name of the resource to attach this tag - to. Tags can be attached to Entries. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - Note that this Tag and its child resources may not - actually be stored in the location in this name. 
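In addition to the request-object sample above, ``create_tag`` accepts the flattened ``parent`` and ``tag`` arguments. A minimal sketch with placeholder resource names (the entry and the tag template must belong to the same organization, as the docstring notes):

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder entry to attach the tag to.
    entry = (
        "projects/my-project/locations/us-central1"
        "/entryGroups/my_group/entries/my_entry"
    )

    tag = datacatalog_v1beta1.Tag()
    tag.template = (
        "projects/my-project/locations/us-central1/tagTemplates/my_template"
    )
    # Depending on the template, values for its required fields may also have
    # to be set on `tag.fields` before the call succeeds.

    created = client.create_tag(parent=entry, tag=tag)
    print(created.name)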
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - tag (google.cloud.datacatalog_v1beta1.types.Tag): - Required. The tag to create. - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Tag: - Tags are used to attach custom metadata to Data Catalog resources. Tags - conform to the specifications within their tag - template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.CreateTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.CreateTagRequest): - request = datacatalog.CreateTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if tag is not None: - request.tag = tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_tag(self, - request: Optional[Union[datacatalog.UpdateTagRequest, dict]] = None, - *, - tag: Optional[tags.Tag] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> tags.Tag: - r"""Updates an existing tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = client.update_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTagRequest, dict]): - The request object. Request message for - [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. - tag (google.cloud.datacatalog_v1beta1.types.Tag): - Required. The updated tag. The "name" - field must be set. - - This corresponds to the ``tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Note: Currently, this parameter can only take - ``"fields"`` as value. - - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the - name ``fields``. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their - values are emptied. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Tag: - Tags are used to attach custom metadata to Data Catalog resources. Tags - conform to the specifications within their tag - template. - - See [Data Catalog - IAM](\ https://cloud.google.com/data-catalog/docs/concepts/iam) - for information on the permissions needed to create - or view tags. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([tag, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.UpdateTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.UpdateTagRequest): - request = datacatalog.UpdateTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if tag is not None: - request.tag = tag - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_tag] - - # Certain fields should be provided within the metadata header; - # add these here. 
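Because the docstring states that ``update_mask`` currently only accepts ``"fields"``, a typical update looks like the sketch below. It assumes ``google-cloud-datacatalog`` and credentials are set up and uses placeholder names; in practice you would start from a ``Tag`` returned by ``list_tags``, modify its ``fields``, and then call ``update_tag``:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.protobuf import field_mask_pb2

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder: normally a Tag previously fetched and modified.
    tag = datacatalog_v1beta1.Tag()
    tag.name = (
        "projects/my-project/locations/us-central1"
        "/entryGroups/my_group/entries/my_entry/tags/my_tag"
    )

    # Per the docstring, only the "fields" path is currently supported.
    mask = field_mask_pb2.FieldMask(paths=["fields"])

    updated = client.update_tag(tag=tag, update_mask=mask)
    print(updated.name)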
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("tag.name", request.tag.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_tag(self, - request: Optional[Union[datacatalog.DeleteTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - client.delete_tag(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTagRequest, dict]): - The request object. Request message for - [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. - name (str): - Required. The name of the tag to delete. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.DeleteTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.DeleteTagRequest): - request = datacatalog.DeleteTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_tags(self, - request: Optional[Union[datacatalog.ListTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTagsPager: - r"""Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. The - [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the - response are lowercased. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_list_tags(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ListTagsRequest, dict]): - The request object. Request message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. - parent (str): - Required. The name of the Data Catalog resource to list - the tags of. The resource could be an - [Entry][google.cloud.datacatalog.v1beta1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. - - Examples: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsPager: - Response message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a datacatalog.ListTagsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, datacatalog.ListTagsRequest): - request = datacatalog.ListTagsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
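``list_tags`` returns a ``ListTagsPager`` rather than a raw response; per the docstring, iterating the pager yields tags and resolves additional pages automatically. A minimal sketch with placeholder names:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    client = datacatalog_v1beta1.DataCatalogClient()

    # Either an entry or an entry group name works as `parent`.
    parent = (
        "projects/my-project/locations/us-central1"
        "/entryGroups/my_group/entries/my_entry"
    )

    # Item-level iteration: each element is a Tag; further pages are fetched
    # transparently as needed.
    for tag in client.list_tags(parent=parent):
        print(tag.name, tag.template)

    # Page-level iteration via the `pages` property, one RPC per page.
    for page in client.list_tags(parent=parent).pages:
        print(len(page.tags))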
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tags] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTagsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy for a resource. Replaces any - existing policy. Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entries.setIamPolicy`` to set policies on - entries. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. 
A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
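Only ``resource`` is available as a flattened argument, so the policy itself is supplied through a full ``SetIamPolicyRequest``. A minimal sketch; the role and member below are placeholders, and note that the call replaces any existing policy on the resource:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    client = datacatalog_v1beta1.DataCatalogClient()

    # Placeholder tag template; the caller needs
    # datacatalog.tagTemplates.setIamPolicy on it (see the docstring above).
    resource = (
        "projects/my-project/locations/us-central1/tagTemplates/my_template"
    )

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/datacatalog.tagTemplateUser",  # placeholder role
                members=["user:someone@example.com"],
            )
        ]
    )

    request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    response = client.set_iam_policy(request=request)
    print(response.etag)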
- return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. A ``NOT_FOUND`` - error is returned if the resource does not exist. An empty - policy is returned if the resource exists but does not have a - policy set on it. - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entries.getIamPolicy`` to get policies on - entries. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (We - don't return a ``NOT_FOUND`` error). - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - A caller is not required to have Google IAM permission to make - this request. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataCatalogClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
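As the warning in ``__exit__`` notes, the client can be used as a context manager when its transport is not shared with other clients; the transport is closed on exit. A minimal sketch combining this with ``test_iam_permissions`` (resource name and permission strings are placeholders):

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.iam.v1 import iam_policy_pb2

    with datacatalog_v1beta1.DataCatalogClient() as client:
        request = iam_policy_pb2.TestIamPermissionsRequest(
            resource=(
                "projects/my-project/locations/us-central1"
                "/tagTemplates/my_template"
            ),
            permissions=["datacatalog.tagTemplates.get"],
        )
        response = client.test_iam_permissions(request=request)
        # Only the permissions the caller actually holds are echoed back.
        print(list(response.permissions))
    # The underlying transport (and gRPC channel) is closed here.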
- """ - self.transport.close() - - - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataCatalogClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py deleted file mode 100644 index 7aa71d9465ff..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/pagers.py +++ /dev/null @@ -1,504 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import search -from google.cloud.datacatalog_v1beta1.types import tags - - -class SearchCatalogPager: - """A pager for iterating through ``search_catalog`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchCatalog`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.SearchCatalogResponse], - request: datacatalog.SearchCatalogRequest, - response: datacatalog.SearchCatalogResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = datacatalog.SearchCatalogRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.SearchCatalogResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[search.SearchCatalogResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchCatalogAsyncPager: - """A pager for iterating through ``search_catalog`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchCatalog`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.SearchCatalogResponse]], - request: datacatalog.SearchCatalogRequest, - response: datacatalog.SearchCatalogResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.SearchCatalogResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.SearchCatalogRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.SearchCatalogResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[search.SearchCatalogResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListEntryGroupsResponse], - request: datacatalog.ListEntryGroupsRequest, - response: datacatalog.ListEntryGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntryGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datacatalog.EntryGroup]: - for page in self.pages: - yield from page.entry_groups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntryGroupsAsyncPager: - """A pager for iterating through ``list_entry_groups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entry_groups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntryGroups`` requests and continue to iterate - through the ``entry_groups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListEntryGroupsResponse]], - request: datacatalog.ListEntryGroupsRequest, - response: datacatalog.ListEntryGroupsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListEntryGroupsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
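            A minimal async usage sketch (the ``parent`` value is a
            placeholder and the snippet is assumed to run inside a
            coroutine); ``__aiter__`` fetches additional pages on demand:

            .. code-block:: python

                from google.cloud import datacatalog_v1beta1

                async def list_all_entry_groups():
                    client = datacatalog_v1beta1.DataCatalogAsyncClient()
                    pager = await client.list_entry_groups(
                        request={"parent": "parent_value"},
                    )
                    async for entry_group in pager:
                        print(entry_group.name)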
- """ - self._method = method - self._request = datacatalog.ListEntryGroupsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListEntryGroupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datacatalog.EntryGroup]: - async def async_generator(): - async for page in self.pages: - for response in page.entry_groups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListEntriesResponse], - request: datacatalog.ListEntriesRequest, - response: datacatalog.ListEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datacatalog.Entry]: - for page in self.pages: - yield from page.entries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListEntriesAsyncPager: - """A pager for iterating through ``list_entries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``entries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListEntries`` requests and continue to iterate - through the ``entries`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListEntriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListEntriesResponse]], - request: datacatalog.ListEntriesRequest, - response: datacatalog.ListEntriesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListEntriesRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListEntriesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = datacatalog.ListEntriesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListEntriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datacatalog.Entry]: - async def async_generator(): - async for page in self.pages: - for response in page.entries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTagsPager: - """A pager for iterating through ``list_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tags`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTags`` requests and continue to iterate - through the ``tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datacatalog.ListTagsResponse], - request: datacatalog.ListTagsRequest, - response: datacatalog.ListTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
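            A minimal sketch of page-level iteration through the ``pages``
            property (the ``parent`` value is a placeholder); each yielded
            object is a full ``ListTagsResponse``:

            .. code-block:: python

                from google.cloud import datacatalog_v1beta1

                client = datacatalog_v1beta1.DataCatalogClient()
                pager = client.list_tags(request={"parent": "parent_value"})
                for page in pager.pages:
                    for tag in page.tags:
                        print(tag.name)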
- """ - self._method = method - self._request = datacatalog.ListTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datacatalog.ListTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[tags.Tag]: - for page in self.pages: - yield from page.tags - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTagsAsyncPager: - """A pager for iterating through ``list_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tags`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTags`` requests and continue to iterate - through the ``tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datacatalog.ListTagsResponse]], - request: datacatalog.ListTagsRequest, - response: datacatalog.ListTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = datacatalog.ListTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datacatalog.ListTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[tags.Tag]: - async def async_generator(): - async for page in self.pages: - for response in page.tags: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py deleted file mode 100644 index 8b4fbbf168be..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataCatalogTransport -from .grpc import DataCatalogGrpcTransport -from .grpc_asyncio import DataCatalogGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataCatalogTransport]] -_transport_registry['grpc'] = DataCatalogGrpcTransport -_transport_registry['grpc_asyncio'] = DataCatalogGrpcAsyncIOTransport - -__all__ = ( - 'DataCatalogTransport', - 'DataCatalogGrpcTransport', - 'DataCatalogGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py deleted file mode 100644 index 78f23cdcf3c7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py +++ /dev/null @@ -1,531 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataCatalogTransport(abc.ABC): - """Abstract transport class for DataCatalog.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
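        # In short: ``credentials`` and ``credentials_file`` are mutually
        # exclusive (``DuplicateCredentialArgs`` above), an explicit
        # ``credentials_file`` is loaded with
        # ``google.auth.load_credentials_from_file``, and when neither is
        # supplied ``google.auth.default`` resolves Application Default
        # Credentials before the GDCH audience handling below.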
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.search_catalog: gapic_v1.method.wrap_method( - self.search_catalog, - default_timeout=None, - client_info=client_info, - ), - self.create_entry_group: gapic_v1.method.wrap_method( - self.create_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.update_entry_group: gapic_v1.method.wrap_method( - self.update_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.get_entry_group: gapic_v1.method.wrap_method( - self.get_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry_group: gapic_v1.method.wrap_method( - self.delete_entry_group, - default_timeout=None, - client_info=client_info, - ), - self.list_entry_groups: gapic_v1.method.wrap_method( - self.list_entry_groups, - default_timeout=None, - client_info=client_info, - ), - self.create_entry: gapic_v1.method.wrap_method( - self.create_entry, - default_timeout=None, - client_info=client_info, - ), - self.update_entry: gapic_v1.method.wrap_method( - self.update_entry, - default_timeout=None, - client_info=client_info, - ), - self.delete_entry: gapic_v1.method.wrap_method( - self.delete_entry, - default_timeout=None, - client_info=client_info, - ), - self.get_entry: gapic_v1.method.wrap_method( - self.get_entry, - default_timeout=None, - client_info=client_info, - ), - self.lookup_entry: gapic_v1.method.wrap_method( - self.lookup_entry, - default_timeout=None, - client_info=client_info, - ), - self.list_entries: gapic_v1.method.wrap_method( - self.list_entries, - default_timeout=None, - client_info=client_info, - ), - self.create_tag_template: gapic_v1.method.wrap_method( - self.create_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.get_tag_template: gapic_v1.method.wrap_method( - self.get_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.update_tag_template: gapic_v1.method.wrap_method( - self.update_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.delete_tag_template: gapic_v1.method.wrap_method( - self.delete_tag_template, - default_timeout=None, - client_info=client_info, - ), - self.create_tag_template_field: gapic_v1.method.wrap_method( - self.create_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.update_tag_template_field: gapic_v1.method.wrap_method( - self.update_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.rename_tag_template_field: gapic_v1.method.wrap_method( - self.rename_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.rename_tag_template_field_enum_value: gapic_v1.method.wrap_method( - self.rename_tag_template_field_enum_value, - default_timeout=None, - client_info=client_info, - ), - 
self.delete_tag_template_field: gapic_v1.method.wrap_method( - self.delete_tag_template_field, - default_timeout=None, - client_info=client_info, - ), - self.create_tag: gapic_v1.method.wrap_method( - self.create_tag, - default_timeout=None, - client_info=client_info, - ), - self.update_tag: gapic_v1.method.wrap_method( - self.update_tag, - default_timeout=None, - client_info=client_info, - ), - self.delete_tag: gapic_v1.method.wrap_method( - self.delete_tag, - default_timeout=None, - client_info=client_info, - ), - self.list_tags: gapic_v1.method.wrap_method( - self.list_tags, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - Union[ - datacatalog.SearchCatalogResponse, - Awaitable[datacatalog.SearchCatalogResponse] - ]]: - raise NotImplementedError() - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - Union[ - datacatalog.EntryGroup, - Awaitable[datacatalog.EntryGroup] - ]]: - raise NotImplementedError() - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - Union[ - datacatalog.ListEntryGroupsResponse, - Awaitable[datacatalog.ListEntryGroupsResponse] - ]]: - raise NotImplementedError() - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - Union[ - datacatalog.Entry, - Awaitable[datacatalog.Entry] - ]]: - raise NotImplementedError() - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - 
Union[ - datacatalog.ListEntriesResponse, - Awaitable[datacatalog.ListEntriesResponse] - ]]: - raise NotImplementedError() - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise NotImplementedError() - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise NotImplementedError() - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - Union[ - tags.TagTemplate, - Awaitable[tags.TagTemplate] - ]]: - raise NotImplementedError() - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - Union[ - tags.TagTemplateField, - Awaitable[tags.TagTemplateField] - ]]: - raise NotImplementedError() - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - Union[ - tags.Tag, - Awaitable[tags.Tag] - ]]: - raise NotImplementedError() - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - Union[ - tags.Tag, - Awaitable[tags.Tag] - ]]: - raise NotImplementedError() - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - Union[ - datacatalog.ListTagsResponse, - Awaitable[datacatalog.ListTagsResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataCatalogTransport', -) diff --git 
a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py deleted file mode 100644 index a93af57fea57..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py +++ /dev/null @@ -1,1122 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO - - -class DataCatalogGrpcTransport(DataCatalogTransport): - """gRPC backend transport for DataCatalog. - - Data Catalog API service allows clients to discover, - understand, and manage their data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - datacatalog.SearchCatalogResponse]: - r"""Return a callable for the search catalog method over gRPC. - - Searches Data Catalog for multiple resources like entries, tags - that match a query. 
- - This is a custom method - (https://cloud.google.com/apis/design/custom_methods) and does - not return the complete resource, only the resource identifier - and high level fields. Clients can subsequently call ``Get`` - methods. - - Note that Data Catalog search queries do not guarantee full - recall. Query results that match your query may not be returned, - even in subsequent result pages. Also note that results returned - (and not returned) can vary across repeated search queries. - - See `Data Catalog Search - Syntax `__ - for more information. - - Returns: - Callable[[~.SearchCatalogRequest], - ~.SearchCatalogResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_catalog' not in self._stubs: - self._stubs['search_catalog'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/SearchCatalog', - request_serializer=datacatalog.SearchCatalogRequest.serialize, - response_deserializer=datacatalog.SearchCatalogResponse.deserialize, - ) - return self._stubs['search_catalog'] - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the create entry group method over gRPC. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.CreateEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup', - request_serializer=datacatalog.CreateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. The user should enable the Data Catalog - API in the project identified by the ``entry_group.name`` - parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntryGroup', - request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['update_entry_group'] - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - datacatalog.EntryGroup]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. - - Returns: - Callable[[~.GetEntryGroupRequest], - ~.EntryGroup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup', - request_serializer=datacatalog.GetEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. Only entry groups that do not contain - entries can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteEntryGroupRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup', - request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - datacatalog.ListEntryGroupsResponse]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists entry groups. - - Returns: - Callable[[~.ListEntryGroupsRequest], - ~.ListEntryGroupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
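        # The stub is created lazily on first access and cached in
        # ``self._stubs``, so ``grpc_channel.unary_unary`` is called at most
        # once per RPC name; subsequent property reads reuse the same stub.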
- if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntryGroups', - request_serializer=datacatalog.ListEntryGroupsRequest.serialize, - response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the create entry method over gRPC. - - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - A maximum of 100,000 entries may be created per entry group. - - Returns: - Callable[[~.CreateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry', - request_serializer=datacatalog.CreateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the update entry method over gRPC. - - Updates an existing entry. Users should enable the Data Catalog - API in the project identified by the ``entry.name`` parameter - (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry', - request_serializer=datacatalog.UpdateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an existing entry. Only entries created through - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] - method can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteEntryRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry', - request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry'] - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the get entry method over gRPC. - - Gets an entry. - - Returns: - Callable[[~.GetEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry', - request_serializer=datacatalog.GetEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - datacatalog.Entry]: - r"""Return a callable for the lookup entry method over gRPC. - - Get an entry by target resource name. This method - allows clients to use the resource name from the source - Google Cloud Platform service to get the Data Catalog - Entry. - - Returns: - Callable[[~.LookupEntryRequest], - ~.Entry]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry', - request_serializer=datacatalog.LookupEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - datacatalog.ListEntriesResponse]: - r"""Return a callable for the list entries method over gRPC. - - Lists entries. - - Returns: - Callable[[~.ListEntriesRequest], - ~.ListEntriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntries', - request_serializer=datacatalog.ListEntriesRequest.serialize, - response_deserializer=datacatalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the create tag template method over gRPC. - - Creates a tag template. The user should enable the Data Catalog - API in the project identified by the ``parent`` parameter (see - `Data Catalog Resource - Project `__ - for more information). 
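            A minimal client-level sketch of this RPC (values are
            placeholders; a real request must also carry a ``tag_template``
            with at least one field, omitted here for brevity):

            .. code-block:: python

                from google.cloud import datacatalog_v1beta1

                client = datacatalog_v1beta1.DataCatalogClient()
                request = datacatalog_v1beta1.CreateTagTemplateRequest(
                    parent="parent_value",
                    tag_template_id="tag_template_id_value",
                )
                response = client.create_tag_template(request=request)
                print(response)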
- - Returns: - Callable[[~.CreateTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template' not in self._stubs: - self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplate', - request_serializer=datacatalog.CreateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['create_tag_template'] - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the get tag template method over gRPC. - - Gets a tag template. - - Returns: - Callable[[~.GetTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_tag_template' not in self._stubs: - self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetTagTemplate', - request_serializer=datacatalog.GetTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['get_tag_template'] - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - tags.TagTemplate]: - r"""Return a callable for the update tag template method over gRPC. - - Updates a tag template. This method cannot be used to update the - fields of a template. The tag template fields are represented as - separate resources and should be updated using their own - create/update/delete methods. Users should enable the Data - Catalog API in the project identified by the - ``tag_template.name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateTagTemplateRequest], - ~.TagTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template' not in self._stubs: - self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplate', - request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['update_tag_template'] - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag template method over gRPC. - - Deletes a tag template and all tags using the template. Users - should enable the Data Catalog API in the project identified by - the ``name`` parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). 
- - Returns: - Callable[[~.DeleteTagTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag_template' not in self._stubs: - self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate', - request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template'] - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the create tag template field method over gRPC. - - Creates a field in a tag template. The user should enable the - Data Catalog API in the project identified by the ``parent`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - Returns: - Callable[[~.CreateTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template_field' not in self._stubs: - self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplateField', - request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['create_tag_template_field'] - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the update tag template field method over gRPC. - - Updates a field in a tag template. This method cannot be used to - update the field type. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template_field' not in self._stubs: - self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplateField', - request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['update_tag_template_field'] - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - tags.TagTemplateField]: - r"""Return a callable for the rename tag template field method over gRPC. - - Renames a field in a tag template. 
The user should enable the - Data Catalog API in the project identified by the ``name`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - Returns: - Callable[[~.RenameTagTemplateFieldRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field' not in self._stubs: - self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateField', - request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field'] - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - tags.TagTemplateField]: - r"""Return a callable for the rename tag template field enum - value method over gRPC. - - Renames an enum value in a tag template. The enum - values have to be unique within one enum field. Thus, an - enum value cannot be renamed with a name used in any - other enum value within the same enum field. - - Returns: - Callable[[~.RenameTagTemplateFieldEnumValueRequest], - ~.TagTemplateField]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field_enum_value' not in self._stubs: - self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateFieldEnumValue', - request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field_enum_value'] - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag template field method over gRPC. - - Deletes a field in a tag template and all uses of that field. - Users should enable the Data Catalog API in the project - identified by the ``name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteTagTemplateFieldRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_tag_template_field' not in self._stubs: - self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField', - request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template_field'] - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - tags.Tag]: - r"""Return a callable for the create tag method over gRPC. - - Creates a tag on an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The - project identified by the ``parent`` parameter for the - `tag `__ - and the `tag - template `__ - used to create the tag must be from the same organization. - - Returns: - Callable[[~.CreateTagRequest], - ~.Tag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag' not in self._stubs: - self._stubs['create_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTag', - request_serializer=datacatalog.CreateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['create_tag'] - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - tags.Tag]: - r"""Return a callable for the update tag method over gRPC. - - Updates an existing tag. - - Returns: - Callable[[~.UpdateTagRequest], - ~.Tag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag' not in self._stubs: - self._stubs['update_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTag', - request_serializer=datacatalog.UpdateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['update_tag'] - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete tag method over gRPC. - - Deletes a tag. - - Returns: - Callable[[~.DeleteTagRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag' not in self._stubs: - self._stubs['delete_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag', - request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag'] - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - datacatalog.ListTagsResponse]: - r"""Return a callable for the list tags method over gRPC. - - Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. The - [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the - response are lowercased. 
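Callers normally reach these tag RPCs through the public DataCatalogClient rather than through the transport itself. A minimal sketch of listing the tags attached to one entry with the v1beta1 client, assuming application-default credentials; the project, location, entry group, and entry IDs are placeholders.

from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient()

# Placeholder resource name; substitute an entry that exists in your project.
entry_name = (
    "projects/my-project/locations/us-central1/"
    "entryGroups/my_group/entries/my_entry"
)

# list_tags returns a pager that fetches additional pages transparently.
for tag in client.list_tags(request={"parent": entry_name}):
    print(tag.template, sorted(tag.fields))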
- - Returns: - Callable[[~.ListTagsRequest], - ~.ListTagsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tags' not in self._stubs: - self._stubs['list_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListTags', - request_serializer=datacatalog.ListTagsRequest.serialize, - response_deserializer=datacatalog.ListTagsResponse.deserialize, - ) - return self._stubs['list_tags'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a resource. Replaces any - existing policy. Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entries.setIamPolicy`` to set policies on - entries. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. A ``NOT_FOUND`` - error is returned if the resource does not exist. An empty - policy is returned if the resource exists but does not have a - policy set on it. - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entries.getIamPolicy`` to get policies on - entries. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (We - don't return a ``NOT_FOUND`` error). - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - A caller is not required to have Google IAM permission to make - this request. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataCatalogGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py deleted file mode 100644 index 076ac6bd0899..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py +++ /dev/null @@ -1,1121 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
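The three IAM methods above only support tag templates, entries, and entry groups, and the caller needs the matching datacatalog.*.setIamPolicy / getIamPolicy permissions. A hedged sketch of a read-modify-write policy update on a hypothetical tag template; the resource name, member, and role are placeholders.

from google.cloud import datacatalog_v1beta1
from google.iam.v1 import iam_policy_pb2

client = datacatalog_v1beta1.DataCatalogClient()

# Hypothetical tag template; entries and entry groups work the same way.
resource = "projects/my-project/locations/us-central1/tagTemplates/my_template"

# Check what the caller may do before attempting to change the policy.
allowed = client.test_iam_permissions(
    request=iam_policy_pb2.TestIamPermissionsRequest(
        resource=resource,
        permissions=["datacatalog.tagTemplates.setIamPolicy"],
    )
)
print(list(allowed.permissions))

# Read-modify-write: fetch the current policy, add a binding, write it back.
policy = client.get_iam_policy(
    request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
)
policy.bindings.add(
    role="roles/datacatalog.tagTemplateUser",
    members=["user:alice@example.com"],
)
client.set_iam_policy(
    request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
)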
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import tags -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO -from .grpc import DataCatalogGrpcTransport - - -class DataCatalogGrpcAsyncIOTransport(DataCatalogTransport): - """gRPC AsyncIO backend transport for DataCatalog. - - Data Catalog API service allows clients to discover, - understand, and manage their data. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def search_catalog(self) -> Callable[ - [datacatalog.SearchCatalogRequest], - Awaitable[datacatalog.SearchCatalogResponse]]: - r"""Return a callable for the search catalog method over gRPC. - - Searches Data Catalog for multiple resources like entries, tags - that match a query. - - This is a custom method - (https://cloud.google.com/apis/design/custom_methods) and does - not return the complete resource, only the resource identifier - and high level fields. Clients can subsequently call ``Get`` - methods. - - Note that Data Catalog search queries do not guarantee full - recall. Query results that match your query may not be returned, - even in subsequent result pages. Also note that results returned - (and not returned) can vary across repeated search queries. - - See `Data Catalog Search - Syntax `__ - for more information. - - Returns: - Callable[[~.SearchCatalogRequest], - Awaitable[~.SearchCatalogResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_catalog' not in self._stubs: - self._stubs['search_catalog'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/SearchCatalog', - request_serializer=datacatalog.SearchCatalogRequest.serialize, - response_deserializer=datacatalog.SearchCatalogResponse.deserialize, - ) - return self._stubs['search_catalog'] - - @property - def create_entry_group(self) -> Callable[ - [datacatalog.CreateEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the create entry group method over gRPC. - - A maximum of 10,000 entry groups may be created per organization - across all locations. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.CreateEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_entry_group' not in self._stubs: - self._stubs['create_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup', - request_serializer=datacatalog.CreateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['create_entry_group'] - - @property - def update_entry_group(self) -> Callable[ - [datacatalog.UpdateEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the update entry group method over gRPC. - - Updates an EntryGroup. The user should enable the Data Catalog - API in the project identified by the ``entry_group.name`` - parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry_group' not in self._stubs: - self._stubs['update_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntryGroup', - request_serializer=datacatalog.UpdateEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['update_entry_group'] - - @property - def get_entry_group(self) -> Callable[ - [datacatalog.GetEntryGroupRequest], - Awaitable[datacatalog.EntryGroup]]: - r"""Return a callable for the get entry group method over gRPC. - - Gets an EntryGroup. - - Returns: - Callable[[~.GetEntryGroupRequest], - Awaitable[~.EntryGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_entry_group' not in self._stubs: - self._stubs['get_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup', - request_serializer=datacatalog.GetEntryGroupRequest.serialize, - response_deserializer=datacatalog.EntryGroup.deserialize, - ) - return self._stubs['get_entry_group'] - - @property - def delete_entry_group(self) -> Callable[ - [datacatalog.DeleteEntryGroupRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entry group method over gRPC. - - Deletes an EntryGroup. Only entry groups that do not contain - entries can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteEntryGroupRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry_group' not in self._stubs: - self._stubs['delete_entry_group'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup', - request_serializer=datacatalog.DeleteEntryGroupRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry_group'] - - @property - def list_entry_groups(self) -> Callable[ - [datacatalog.ListEntryGroupsRequest], - Awaitable[datacatalog.ListEntryGroupsResponse]]: - r"""Return a callable for the list entry groups method over gRPC. - - Lists entry groups. - - Returns: - Callable[[~.ListEntryGroupsRequest], - Awaitable[~.ListEntryGroupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entry_groups' not in self._stubs: - self._stubs['list_entry_groups'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntryGroups', - request_serializer=datacatalog.ListEntryGroupsRequest.serialize, - response_deserializer=datacatalog.ListEntryGroupsResponse.deserialize, - ) - return self._stubs['list_entry_groups'] - - @property - def create_entry(self) -> Callable[ - [datacatalog.CreateEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the create entry method over gRPC. - - Creates an entry. Only entries of 'FILESET' type or - user-specified type can be created. - - Users should enable the Data Catalog API in the project - identified by the ``parent`` parameter (see [Data Catalog - Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - A maximum of 100,000 entries may be created per entry group. - - Returns: - Callable[[~.CreateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_entry' not in self._stubs: - self._stubs['create_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry', - request_serializer=datacatalog.CreateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['create_entry'] - - @property - def update_entry(self) -> Callable[ - [datacatalog.UpdateEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the update entry method over gRPC. - - Updates an existing entry. Users should enable the Data Catalog - API in the project identified by the ``entry.name`` parameter - (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_entry' not in self._stubs: - self._stubs['update_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry', - request_serializer=datacatalog.UpdateEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['update_entry'] - - @property - def delete_entry(self) -> Callable[ - [datacatalog.DeleteEntryRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete entry method over gRPC. - - Deletes an existing entry. Only entries created through - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] - method can be deleted. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteEntryRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_entry' not in self._stubs: - self._stubs['delete_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry', - request_serializer=datacatalog.DeleteEntryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_entry'] - - @property - def get_entry(self) -> Callable[ - [datacatalog.GetEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the get entry method over gRPC. - - Gets an entry. - - Returns: - Callable[[~.GetEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_entry' not in self._stubs: - self._stubs['get_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry', - request_serializer=datacatalog.GetEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['get_entry'] - - @property - def lookup_entry(self) -> Callable[ - [datacatalog.LookupEntryRequest], - Awaitable[datacatalog.Entry]]: - r"""Return a callable for the lookup entry method over gRPC. - - Get an entry by target resource name. This method - allows clients to use the resource name from the source - Google Cloud Platform service to get the Data Catalog - Entry. - - Returns: - Callable[[~.LookupEntryRequest], - Awaitable[~.Entry]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'lookup_entry' not in self._stubs: - self._stubs['lookup_entry'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry', - request_serializer=datacatalog.LookupEntryRequest.serialize, - response_deserializer=datacatalog.Entry.deserialize, - ) - return self._stubs['lookup_entry'] - - @property - def list_entries(self) -> Callable[ - [datacatalog.ListEntriesRequest], - Awaitable[datacatalog.ListEntriesResponse]]: - r"""Return a callable for the list entries method over gRPC. - - Lists entries. - - Returns: - Callable[[~.ListEntriesRequest], - Awaitable[~.ListEntriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_entries' not in self._stubs: - self._stubs['list_entries'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListEntries', - request_serializer=datacatalog.ListEntriesRequest.serialize, - response_deserializer=datacatalog.ListEntriesResponse.deserialize, - ) - return self._stubs['list_entries'] - - @property - def create_tag_template(self) -> Callable[ - [datacatalog.CreateTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the create tag template method over gRPC. - - Creates a tag template. The user should enable the Data Catalog - API in the project identified by the ``parent`` parameter (see - `Data Catalog Resource - Project `__ - for more information). - - Returns: - Callable[[~.CreateTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_tag_template' not in self._stubs: - self._stubs['create_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplate', - request_serializer=datacatalog.CreateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['create_tag_template'] - - @property - def get_tag_template(self) -> Callable[ - [datacatalog.GetTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the get tag template method over gRPC. - - Gets a tag template. - - Returns: - Callable[[~.GetTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_tag_template' not in self._stubs: - self._stubs['get_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetTagTemplate', - request_serializer=datacatalog.GetTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['get_tag_template'] - - @property - def update_tag_template(self) -> Callable[ - [datacatalog.UpdateTagTemplateRequest], - Awaitable[tags.TagTemplate]]: - r"""Return a callable for the update tag template method over gRPC. - - Updates a tag template. This method cannot be used to update the - fields of a template. The tag template fields are represented as - separate resources and should be updated using their own - create/update/delete methods. Users should enable the Data - Catalog API in the project identified by the - ``tag_template.name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateTagTemplateRequest], - Awaitable[~.TagTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template' not in self._stubs: - self._stubs['update_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplate', - request_serializer=datacatalog.UpdateTagTemplateRequest.serialize, - response_deserializer=tags.TagTemplate.deserialize, - ) - return self._stubs['update_tag_template'] - - @property - def delete_tag_template(self) -> Callable[ - [datacatalog.DeleteTagTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag template method over gRPC. - - Deletes a tag template and all tags using the template. Users - should enable the Data Catalog API in the project identified by - the ``name`` parameter (see [Data Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteTagTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_tag_template' not in self._stubs: - self._stubs['delete_tag_template'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplate', - request_serializer=datacatalog.DeleteTagTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template'] - - @property - def create_tag_template_field(self) -> Callable[ - [datacatalog.CreateTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the create tag template field method over gRPC. - - Creates a field in a tag template. The user should enable the - Data Catalog API in the project identified by the ``parent`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - Returns: - Callable[[~.CreateTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag_template_field' not in self._stubs: - self._stubs['create_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTagTemplateField', - request_serializer=datacatalog.CreateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['create_tag_template_field'] - - @property - def update_tag_template_field(self) -> Callable[ - [datacatalog.UpdateTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the update tag template field method over gRPC. - - Updates a field in a tag template. This method cannot be used to - update the field type. Users should enable the Data Catalog API - in the project identified by the ``name`` parameter (see [Data - Catalog Resource Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.UpdateTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag_template_field' not in self._stubs: - self._stubs['update_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTagTemplateField', - request_serializer=datacatalog.UpdateTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['update_tag_template_field'] - - @property - def rename_tag_template_field(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the rename tag template field method over gRPC. - - Renames a field in a tag template. The user should enable the - Data Catalog API in the project identified by the ``name`` - parameter (see `Data Catalog Resource - Project `__ - for more information). - - Returns: - Callable[[~.RenameTagTemplateFieldRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field' not in self._stubs: - self._stubs['rename_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateField', - request_serializer=datacatalog.RenameTagTemplateFieldRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field'] - - @property - def rename_tag_template_field_enum_value(self) -> Callable[ - [datacatalog.RenameTagTemplateFieldEnumValueRequest], - Awaitable[tags.TagTemplateField]]: - r"""Return a callable for the rename tag template field enum - value method over gRPC. - - Renames an enum value in a tag template. The enum - values have to be unique within one enum field. Thus, an - enum value cannot be renamed with a name used in any - other enum value within the same enum field. - - Returns: - Callable[[~.RenameTagTemplateFieldEnumValueRequest], - Awaitable[~.TagTemplateField]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_tag_template_field_enum_value' not in self._stubs: - self._stubs['rename_tag_template_field_enum_value'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/RenameTagTemplateFieldEnumValue', - request_serializer=datacatalog.RenameTagTemplateFieldEnumValueRequest.serialize, - response_deserializer=tags.TagTemplateField.deserialize, - ) - return self._stubs['rename_tag_template_field_enum_value'] - - @property - def delete_tag_template_field(self) -> Callable[ - [datacatalog.DeleteTagTemplateFieldRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag template field method over gRPC. - - Deletes a field in a tag template and all uses of that field. - Users should enable the Data Catalog API in the project - identified by the ``name`` parameter (see [Data Catalog Resource - Project] - (https://cloud.google.com/data-catalog/docs/concepts/resource-project) - for more information). - - Returns: - Callable[[~.DeleteTagTemplateFieldRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag_template_field' not in self._stubs: - self._stubs['delete_tag_template_field'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTagTemplateField', - request_serializer=datacatalog.DeleteTagTemplateFieldRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag_template_field'] - - @property - def create_tag(self) -> Callable[ - [datacatalog.CreateTagRequest], - Awaitable[tags.Tag]]: - r"""Return a callable for the create tag method over gRPC. - - Creates a tag on an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. Note: The - project identified by the ``parent`` parameter for the - `tag `__ - and the `tag - template `__ - used to create the tag must be from the same organization. 
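As the CreateTag description above notes, the tag and the tag template it instantiates must come from projects in the same organization. A hedged sketch of attaching a tag to an entry, shown with the synchronous client (the async client exposes the same call as a coroutine); every resource name is a placeholder and the "source" field is assumed to already be defined on the template as a string field.

from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient()

entry_name = (
    "projects/my-project/locations/us-central1/"
    "entryGroups/my_group/entries/my_entry"
)
template_name = (
    "projects/my-project/locations/us-central1/tagTemplates/my_template"
)

tag = client.create_tag(
    request={
        "parent": entry_name,
        "tag": {
            "template": template_name,
            # "source" is assumed to exist on the template as a string field.
            "fields": {"source": {"string_value": "nightly_etl"}},
        },
    }
)
print(tag.name)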
- - Returns: - Callable[[~.CreateTagRequest], - Awaitable[~.Tag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_tag' not in self._stubs: - self._stubs['create_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/CreateTag', - request_serializer=datacatalog.CreateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['create_tag'] - - @property - def update_tag(self) -> Callable[ - [datacatalog.UpdateTagRequest], - Awaitable[tags.Tag]]: - r"""Return a callable for the update tag method over gRPC. - - Updates an existing tag. - - Returns: - Callable[[~.UpdateTagRequest], - Awaitable[~.Tag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_tag' not in self._stubs: - self._stubs['update_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateTag', - request_serializer=datacatalog.UpdateTagRequest.serialize, - response_deserializer=tags.Tag.deserialize, - ) - return self._stubs['update_tag'] - - @property - def delete_tag(self) -> Callable[ - [datacatalog.DeleteTagRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete tag method over gRPC. - - Deletes a tag. - - Returns: - Callable[[~.DeleteTagRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_tag' not in self._stubs: - self._stubs['delete_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteTag', - request_serializer=datacatalog.DeleteTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_tag'] - - @property - def list_tags(self) -> Callable[ - [datacatalog.ListTagsRequest], - Awaitable[datacatalog.ListTagsResponse]]: - r"""Return a callable for the list tags method over gRPC. - - Lists tags assigned to an - [Entry][google.cloud.datacatalog.v1beta1.Entry]. The - [columns][google.cloud.datacatalog.v1beta1.Tag.column] in the - response are lowercased. - - Returns: - Callable[[~.ListTagsRequest], - Awaitable[~.ListTagsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_tags' not in self._stubs: - self._stubs['list_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/ListTags', - request_serializer=datacatalog.ListTagsRequest.serialize, - response_deserializer=datacatalog.ListTagsResponse.deserialize, - ) - return self._stubs['list_tags'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy for a resource. Replaces any - existing policy. Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on - tag templates. - - ``datacatalog.entries.setIamPolicy`` to set policies on - entries. - - ``datacatalog.entryGroups.setIamPolicy`` to set policies on - entry groups. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. A ``NOT_FOUND`` - error is returned if the resource does not exist. An empty - policy is returned if the resource exists but does not have a - policy set on it. - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - Callers must have following Google IAM permission - - - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on - tag templates. - - ``datacatalog.entries.getIamPolicy`` to get policies on - entries. - - ``datacatalog.entryGroups.getIamPolicy`` to get policies on - entry groups. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permissions on a resource. If the resource - does not exist, an empty set of permissions is returned (We - don't return a ``NOT_FOUND`` error). - - Supported resources are: - - - Tag templates. - - Entries. - - Entry groups. Note, this method cannot be used to manage - policies for BigQuery, Pub/Sub and any external Google Cloud - Platform resources synced to Data Catalog. - - A caller is not required to have Google IAM permission to make - this request. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.DataCatalog/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'DataCatalogGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py deleted file mode 100644 index cde5f3a05abb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import PolicyTagManagerClient -from .async_client import PolicyTagManagerAsyncClient - -__all__ = ( - 'PolicyTagManagerClient', - 'PolicyTagManagerAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py deleted file mode 100644 index e79b1a312ca2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py +++ /dev/null @@ -1,1582 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport -from .client import PolicyTagManagerClient - - -class PolicyTagManagerAsyncClient: - """The policy tag manager API service allows clients to manage - their taxonomies and policy tags. 
- """ - - _client: PolicyTagManagerClient - - DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT - - policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path) - parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path) - taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path) - parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path) - common_billing_account_path = staticmethod(PolicyTagManagerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PolicyTagManagerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path) - parse_common_folder_path = staticmethod(PolicyTagManagerClient.parse_common_folder_path) - common_organization_path = staticmethod(PolicyTagManagerClient.common_organization_path) - parse_common_organization_path = staticmethod(PolicyTagManagerClient.parse_common_organization_path) - common_project_path = staticmethod(PolicyTagManagerClient.common_project_path) - parse_common_project_path = staticmethod(PolicyTagManagerClient.parse_common_project_path) - common_location_path = staticmethod(PolicyTagManagerClient.common_location_path) - parse_common_location_path = staticmethod(PolicyTagManagerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerAsyncClient: The constructed client. - """ - return PolicyTagManagerClient.from_service_account_info.__func__(PolicyTagManagerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerAsyncClient: The constructed client. - """ - return PolicyTagManagerClient.from_service_account_file.__func__(PolicyTagManagerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return PolicyTagManagerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> PolicyTagManagerTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            PolicyTagManagerTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the policy tag manager client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.PolicyTagManagerTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
- """ - self._client = PolicyTagManagerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_taxonomy(self, - request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Creates a taxonomy in the specified project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest, dict]]): - The request object. Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. - parent (:class:`str`): - Required. Resource name of the - project that the taxonomy will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - taxonomy (:class:`google.cloud.datacatalog_v1beta1.types.Taxonomy`): - The taxonomy to be created. - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, taxonomy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.CreateTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_taxonomy(self, - request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a taxonomy. This operation will also delete - all policy tags in this taxonomy along with their - associated policies. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - await client.delete_taxonomy(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest, dict]]): - The request object. Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. - name (:class:`str`): - Required. Resource name of the - taxonomy to be deleted. All policy tags - in this taxonomy will also be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.DeleteTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_taxonomy(self, - request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, - *, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Updates a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTaxonomyRequest( - ) - - # Make the request - response = await client.update_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest, dict]]): - The request object. Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. - taxonomy (:class:`google.cloud.datacatalog_v1beta1.types.Taxonomy`): - The taxonomy to update. Only description, display_name, - and activated policy types can be updated. - - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([taxonomy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.UpdateTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("taxonomy.name", request.taxonomy.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_taxonomies(self, - request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTaxonomiesAsyncPager: - r"""Lists all taxonomies in a project in a particular - location that the caller has permission to view. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest, dict]]): - The request object. Request message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - parent (:class:`str`): - Required. Resource name of the - project to list the taxonomies of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager: - Response message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.ListTaxonomiesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTaxonomiesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_taxonomy(self, - request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Gets a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest, dict]]): - The request object. Request message for - [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. - name (:class:`str`): - Required. Resource name of the - requested taxonomy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.GetTaxonomyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_taxonomy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_policy_tag(self, - request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Creates a policy tag in the specified taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest, dict]]): - The request object. Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. - parent (:class:`str`): - Required. Resource name of the - taxonomy that the policy tag will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy_tag (:class:`google.cloud.datacatalog_v1beta1.types.PolicyTag`): - The policy tag to be created. - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). 
PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy_tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.CreatePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_policy_tag(self, - request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a policy tag. Also deletes all of its - descendant policy tags. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_policy_tag(request=request) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest, dict]]): - The request object. Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. - name (:class:`str`): - Required. Resource name of the policy - tag to be deleted. All of its descendant - policy tags will also be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.DeletePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_policy_tag(self, - request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, - *, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Updates a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdatePolicyTagRequest( - ) - - # Make the request - response = await client.update_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest, dict]]): - The request object. Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. - policy_tag (:class:`google.cloud.datacatalog_v1beta1.types.PolicyTag`): - The policy tag to update. Only the description, - display_name, and parent_policy_tag fields can be - updated. - - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([policy_tag]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.UpdatePolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("policy_tag.name", request.policy_tag.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_policy_tags(self, - request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPolicyTagsAsyncPager: - r"""Lists all policy tags in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest, dict]]): - The request object. Request message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - parent (:class:`str`): - Required. Resource name of the - taxonomy to list the policy tags of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager: - Response message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.ListPolicyTagsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_policy_tags, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListPolicyTagsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_policy_tag(self, - request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Gets a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = await client.get_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest, dict]]): - The request object. Request message for - [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. - name (:class:`str`): - Required. Resource name of the - requested policy tag. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". 
- - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = policytagmanager.GetPolicyTagRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_policy_tag, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a taxonomy or a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. 
A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for a taxonomy or a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the permissions that a caller has on the - specified taxonomy or policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "PolicyTagManagerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py deleted file mode 100644 index c6b0980c4c39..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ /dev/null @@ -1,1796 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PolicyTagManagerGrpcTransport -from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport - - -class PolicyTagManagerClientMeta(type): - """Metaclass for the PolicyTagManager client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] - _transport_registry["grpc"] = PolicyTagManagerGrpcTransport - _transport_registry["grpc_asyncio"] = PolicyTagManagerGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[PolicyTagManagerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PolicyTagManagerClient(metaclass=PolicyTagManagerClientMeta): - """The policy tag manager API service allows clients to manage - their taxonomies and policy tags. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "datacatalog.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PolicyTagManagerTransport: - """Returns the transport used by the client instance. - - Returns: - PolicyTagManagerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def policy_tag_path(project: str,location: str,taxonomy: str,policy_tag: str,) -> str: - """Returns a fully-qualified policy_tag string.""" - return "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) - - @staticmethod - def parse_policy_tag_path(path: str) -> Dict[str,str]: - """Parses a policy_tag path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)/policyTags/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def taxonomy_path(project: str,location: str,taxonomy: str,) -> str: - """Returns a fully-qualified taxonomy string.""" - return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - - @staticmethod - def parse_taxonomy_path(path: str) -> Dict[str,str]: - """Parses a taxonomy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PolicyTagManagerTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, PolicyTagManagerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, PolicyTagManagerTransport): - # transport is a PolicyTagManagerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_taxonomy(self, - request: Optional[Union[policytagmanager.CreateTaxonomyRequest, dict]] = None, - *, - parent: Optional[str] = None, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Creates a taxonomy in the specified project. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest, dict]): - The request object. Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. - parent (str): - Required. Resource name of the - project that the taxonomy will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): - The taxonomy to be created. - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, taxonomy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.CreateTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.CreateTaxonomyRequest): - request = policytagmanager.CreateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_taxonomy(self, - request: Optional[Union[policytagmanager.DeleteTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a taxonomy. This operation will also delete - all policy tags in this taxonomy along with their - associated policies. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - client.delete_taxonomy(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest, dict]): - The request object. Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. - name (str): - Required. Resource name of the - taxonomy to be deleted. All policy tags - in this taxonomy will also be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.DeleteTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.DeleteTaxonomyRequest): - request = policytagmanager.DeleteTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_taxonomy(self, - request: Optional[Union[policytagmanager.UpdateTaxonomyRequest, dict]] = None, - *, - taxonomy: Optional[policytagmanager.Taxonomy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Updates a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTaxonomyRequest( - ) - - # Make the request - response = client.update_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest, dict]): - The request object. Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. - taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): - The taxonomy to update. Only description, display_name, - and activated policy types can be updated. - - This corresponds to the ``taxonomy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([taxonomy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.UpdateTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.UpdateTaxonomyRequest): - request = policytagmanager.UpdateTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if taxonomy is not None: - request.taxonomy = taxonomy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.update_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("taxonomy.name", request.taxonomy.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_taxonomies(self, - request: Optional[Union[policytagmanager.ListTaxonomiesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTaxonomiesPager: - r"""Lists all taxonomies in a project in a particular - location that the caller has permission to view. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest, dict]): - The request object. Request message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - parent (str): - Required. Resource name of the - project to list the taxonomies of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesPager: - Response message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.ListTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, policytagmanager.ListTaxonomiesRequest): - request = policytagmanager.ListTaxonomiesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTaxonomiesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_taxonomy(self, - request: Optional[Union[policytagmanager.GetTaxonomyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.Taxonomy: - r"""Gets a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = client.get_taxonomy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest, dict]): - The request object. Request message for - [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. - name (str): - Required. Resource name of the - requested taxonomy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.Taxonomy: - A taxonomy is a collection of policy tags that classify data along a common - axis. For instance a data *sensitivity* taxonomy - could contain policy tags denoting PII such as age, - zipcode, and SSN. A data *origin* taxonomy could - contain policy tags to distinguish user data, - employee data, partner data, public data. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.GetTaxonomyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.GetTaxonomyRequest): - request = policytagmanager.GetTaxonomyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_taxonomy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_policy_tag(self, - request: Optional[Union[policytagmanager.CreatePolicyTagRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Creates a policy tag in the specified taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest, dict]): - The request object. Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. - parent (str): - Required. Resource name of the - taxonomy that the policy tag will belong - to. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): - The policy tag to be created. - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. 
ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy_tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.CreatePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.CreatePolicyTagRequest): - request = policytagmanager.CreatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_policy_tag(self, - request: Optional[Union[policytagmanager.DeletePolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a policy tag. Also deletes all of its - descendant policy tags. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - client.delete_policy_tag(request=request) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest, dict]): - The request object. Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. - name (str): - Required. Resource name of the policy - tag to be deleted. All of its descendant - policy tags will also be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.DeletePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.DeletePolicyTagRequest): - request = policytagmanager.DeletePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_policy_tag(self, - request: Optional[Union[policytagmanager.UpdatePolicyTagRequest, dict]] = None, - *, - policy_tag: Optional[policytagmanager.PolicyTag] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Updates a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdatePolicyTagRequest( - ) - - # Make the request - response = client.update_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest, dict]): - The request object. Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. - policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): - The policy tag to update. Only the description, - display_name, and parent_policy_tag fields can be - updated. - - This corresponds to the ``policy_tag`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([policy_tag]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.UpdatePolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.UpdatePolicyTagRequest): - request = policytagmanager.UpdatePolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if policy_tag is not None: - request.policy_tag = policy_tag - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("policy_tag.name", request.policy_tag.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_policy_tags(self, - request: Optional[Union[policytagmanager.ListPolicyTagsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListPolicyTagsPager: - r"""Lists all policy tags in a taxonomy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest, dict]): - The request object. Request message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - parent (str): - Required. Resource name of the - taxonomy to list the policy tags of. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsPager: - Response message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.ListPolicyTagsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.ListPolicyTagsRequest): - request = policytagmanager.ListPolicyTagsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_policy_tags] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListPolicyTagsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_policy_tag(self, - request: Optional[Union[policytagmanager.GetPolicyTagRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanager.PolicyTag: - r"""Gets a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = client.get_policy_tag(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest, dict]): - The request object. 
Request message for - [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. - name (str): - Required. Resource name of the - requested policy tag. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.PolicyTag: - Denotes one policy tag in a taxonomy - (e.g. ssn). Policy Tags can be defined - in a hierarchy. For example, consider - the following hierarchy: - - Geolocation -> (LatLong, City, - ZipCode). PolicyTag "Geolocation" - contains three child policy tags: - "LatLong", "City", and "ZipCode". - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanager.GetPolicyTagRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanager.GetPolicyTagRequest): - request = policytagmanager.GetPolicyTagRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_policy_tag] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for a taxonomy or a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. 
- request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for a taxonomy or a policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
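Because ``set_iam_policy`` replaces the entire policy, callers usually read the current policy (including its ``etag``), modify the bindings, and write it back. A minimal sketch, assuming an existing taxonomy; the resource name, role, and member below are placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.iam.v1 import iam_policy_pb2, policy_pb2  # type: ignore

    client = datacatalog_v1beta1.PolicyTagManagerClient()

    # Placeholder taxonomy resource name.
    resource = "projects/my-project/locations/us/taxonomies/123"

    # Read-modify-write: fetching the policy first carries its etag forward,
    # so a concurrent edit is rejected instead of silently overwritten.
    policy = client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    policy.bindings.append(
        policy_pb2.Binding(
            role="roles/datacatalog.categoryFineGrainedReader",  # example role
            members=["user:mike@example.com"],
        )
    )
    updated = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )
    print(updated.etag)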
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the permissions that a caller has on the - specified taxonomy or policy tag. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PolicyTagManagerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py deleted file mode 100644 index c505e3c5ebbe..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/pagers.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
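The context-manager support above (``__enter__``/``__exit__``) closes the transport on exit, so it is only appropriate when the transport is not shared. A minimal sketch combining it with ``test_iam_permissions``; the resource name and permission strings are placeholders:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.iam.v1 import iam_policy_pb2  # type: ignore

    # Placeholder policy tag resource name and permission strings.
    resource = "projects/my-project/locations/us/taxonomies/123/policyTags/456"
    permissions = ["datacatalog.categories.getIamPolicy"]

    # Exiting the ``with`` block calls ``transport.close()``,
    # exactly as ``__exit__`` above does.
    with datacatalog_v1beta1.PolicyTagManagerClient() as client:
        response = client.test_iam_permissions(
            request=iam_policy_pb2.TestIamPermissionsRequest(
                resource=resource,
                permissions=permissions,
            )
        )
        print(list(response.permissions))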
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.datacatalog_v1beta1.types import policytagmanager - - -class ListTaxonomiesPager: - """A pager for iterating through ``list_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``taxonomies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTaxonomies`` requests and continue to iterate - through the ``taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., policytagmanager.ListTaxonomiesResponse], - request: policytagmanager.ListTaxonomiesRequest, - response: policytagmanager.ListTaxonomiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = policytagmanager.ListTaxonomiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[policytagmanager.ListTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[policytagmanager.Taxonomy]: - for page in self.pages: - yield from page.taxonomies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTaxonomiesAsyncPager: - """A pager for iterating through ``list_taxonomies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``taxonomies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTaxonomies`` requests and continue to iterate - through the ``taxonomies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[policytagmanager.ListTaxonomiesResponse]], - request: policytagmanager.ListTaxonomiesRequest, - response: policytagmanager.ListTaxonomiesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListTaxonomiesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = policytagmanager.ListTaxonomiesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[policytagmanager.ListTaxonomiesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[policytagmanager.Taxonomy]: - async def async_generator(): - async for page in self.pages: - for response in page.taxonomies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPolicyTagsPager: - """A pager for iterating through ``list_policy_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``policy_tags`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListPolicyTags`` requests and continue to iterate - through the ``policy_tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., policytagmanager.ListPolicyTagsResponse], - request: policytagmanager.ListPolicyTagsRequest, - response: policytagmanager.ListPolicyTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = policytagmanager.ListPolicyTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[policytagmanager.ListPolicyTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[policytagmanager.PolicyTag]: - for page in self.pages: - yield from page.policy_tags - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListPolicyTagsAsyncPager: - """A pager for iterating through ``list_policy_tags`` requests. - - This class thinly wraps an initial - :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``policy_tags`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListPolicyTags`` requests and continue to iterate - through the ``policy_tags`` field on the - corresponding responses. - - All the usual :class:`google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[policytagmanager.ListPolicyTagsResponse]], - request: policytagmanager.ListPolicyTagsRequest, - response: policytagmanager.ListPolicyTagsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest): - The initial request object. - response (google.cloud.datacatalog_v1beta1.types.ListPolicyTagsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = policytagmanager.ListPolicyTagsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[policytagmanager.ListPolicyTagsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[policytagmanager.PolicyTag]: - async def async_generator(): - async for page in self.pages: - for response in page.policy_tags: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py deleted file mode 100644 index 192f3e97b958..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PolicyTagManagerTransport -from .grpc import PolicyTagManagerGrpcTransport -from .grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerTransport]] -_transport_registry['grpc'] = PolicyTagManagerGrpcTransport -_transport_registry['grpc_asyncio'] = PolicyTagManagerGrpcAsyncIOTransport - -__all__ = ( - 'PolicyTagManagerTransport', - 'PolicyTagManagerGrpcTransport', - 'PolicyTagManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py deleted file mode 100644 index c5142bbfcb5d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py +++ /dev/null @@ -1,320 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PolicyTagManagerTransport(abc.ABC): - """Abstract transport class for PolicyTagManager.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_taxonomy: gapic_v1.method.wrap_method( - self.create_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.delete_taxonomy: gapic_v1.method.wrap_method( - self.delete_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.update_taxonomy: gapic_v1.method.wrap_method( - self.update_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.list_taxonomies: gapic_v1.method.wrap_method( - self.list_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.get_taxonomy: gapic_v1.method.wrap_method( - self.get_taxonomy, - default_timeout=None, - client_info=client_info, - ), - self.create_policy_tag: gapic_v1.method.wrap_method( - self.create_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.delete_policy_tag: gapic_v1.method.wrap_method( - self.delete_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.update_policy_tag: gapic_v1.method.wrap_method( - self.update_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.list_policy_tags: gapic_v1.method.wrap_method( - self.list_policy_tags, - default_timeout=None, - client_info=client_info, - ), - self.get_policy_tag: gapic_v1.method.wrap_method( - self.get_policy_tag, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
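The constructor above resolves credentials in a fixed order: an explicit ``credentials_file``, an explicit ``credentials`` object, or Application Default Credentials as the fallback (the first two are mutually exclusive). A minimal sketch of supplying explicit service-account credentials to the client; the key file path is a placeholder:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.oauth2 import service_account

    # Placeholder key file; any service account JSON key works here.
    creds = service_account.Credentials.from_service_account_file(
        "service-account.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )

    # Explicit credentials bypass Application Default Credentials, matching
    # the resolution order in the transport constructor above.
    client = datacatalog_v1beta1.PolicyTagManagerClient(credentials=creds)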
- """ - raise NotImplementedError() - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - Union[ - policytagmanager.ListTaxonomiesResponse, - Awaitable[policytagmanager.ListTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - Union[ - policytagmanager.Taxonomy, - Awaitable[policytagmanager.Taxonomy] - ]]: - raise NotImplementedError() - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - Union[ - policytagmanager.ListPolicyTagsResponse, - Awaitable[policytagmanager.ListPolicyTagsResponse] - ]]: - raise NotImplementedError() - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - Union[ - policytagmanager.PolicyTag, - Awaitable[policytagmanager.PolicyTag] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PolicyTagManagerTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py deleted file mode 100644 index 08165c6811a8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py +++ /dev/null @@ -1,586 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); 
-# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO - - -class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport): - """gRPC backend transport for PolicyTagManager. - - The policy tag manager API service allows clients to manage - their taxonomies and policy tags. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the create taxonomy method over gRPC. - - Creates a taxonomy in the specified project. - - Returns: - Callable[[~.CreateTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_taxonomy' not in self._stubs: - self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreateTaxonomy', - request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['create_taxonomy'] - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete taxonomy method over gRPC. - - Deletes a taxonomy. This operation will also delete - all policy tags in this taxonomy along with their - associated policies. 
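The ``create_channel`` classmethod above can also be called directly to pre-build a channel (for example, to customize channel options) and the resulting transport handed to the client. A minimal sketch, assuming Application Default Credentials are available; the options shown mirror the transport's defaults:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1
    from google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports import (
        PolicyTagManagerGrpcTransport,
    )

    # Build a channel with the same unlimited message-size options the
    # transport applies by default, then hand it to the transport.
    channel = PolicyTagManagerGrpcTransport.create_channel(
        "datacatalog.googleapis.com",
        options=[
            ("grpc.max_send_message_length", -1),
            ("grpc.max_receive_message_length", -1),
        ],
    )
    transport = PolicyTagManagerGrpcTransport(channel=channel)
    client = datacatalog_v1beta1.PolicyTagManagerClient(transport=transport)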
- - Returns: - Callable[[~.DeleteTaxonomyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_taxonomy' not in self._stubs: - self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy', - request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_taxonomy'] - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the update taxonomy method over gRPC. - - Updates a taxonomy. - - Returns: - Callable[[~.UpdateTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_taxonomy' not in self._stubs: - self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdateTaxonomy', - request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['update_taxonomy'] - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - policytagmanager.ListTaxonomiesResponse]: - r"""Return a callable for the list taxonomies method over gRPC. - - Lists all taxonomies in a project in a particular - location that the caller has permission to view. - - Returns: - Callable[[~.ListTaxonomiesRequest], - ~.ListTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_taxonomies' not in self._stubs: - self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListTaxonomies', - request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, - response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, - ) - return self._stubs['list_taxonomies'] - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - policytagmanager.Taxonomy]: - r"""Return a callable for the get taxonomy method over gRPC. - - Gets a taxonomy. - - Returns: - Callable[[~.GetTaxonomyRequest], - ~.Taxonomy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_taxonomy' not in self._stubs: - self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetTaxonomy', - request_serializer=policytagmanager.GetTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['get_taxonomy'] - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the create policy tag method over gRPC. - - Creates a policy tag in the specified taxonomy. - - Returns: - Callable[[~.CreatePolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_policy_tag' not in self._stubs: - self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreatePolicyTag', - request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['create_policy_tag'] - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete policy tag method over gRPC. - - Deletes a policy tag. Also deletes all of its - descendant policy tags. - - Returns: - Callable[[~.DeletePolicyTagRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_policy_tag' not in self._stubs: - self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag', - request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_policy_tag'] - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the update policy tag method over gRPC. - - Updates a policy tag. - - Returns: - Callable[[~.UpdatePolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_policy_tag' not in self._stubs: - self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdatePolicyTag', - request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['update_policy_tag'] - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - policytagmanager.ListPolicyTagsResponse]: - r"""Return a callable for the list policy tags method over gRPC. - - Lists all policy tags in a taxonomy. 
- - Returns: - Callable[[~.ListPolicyTagsRequest], - ~.ListPolicyTagsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_policy_tags' not in self._stubs: - self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListPolicyTags', - request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, - response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, - ) - return self._stubs['list_policy_tags'] - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - policytagmanager.PolicyTag]: - r"""Return a callable for the get policy tag method over gRPC. - - Gets a policy tag. - - Returns: - Callable[[~.GetPolicyTagRequest], - ~.PolicyTag]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_policy_tag' not in self._stubs: - self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetPolicyTag', - request_serializer=policytagmanager.GetPolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['get_policy_tag'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for a taxonomy or a policy tag. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for a taxonomy or a policy tag. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the permissions that a caller has on the - specified taxonomy or policy tag. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'PolicyTagManagerGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py deleted file mode 100644 index b6616acc6c20..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py +++ /dev/null @@ -1,585 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
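The transport can also be selected by its registry label when constructing the client; ``kind`` then reports which backend is in use. A short sketch:

.. code-block:: python

    from google.cloud import datacatalog_v1beta1

    # "grpc" is the default label for the synchronous client;
    # "grpc_asyncio" backs the async client.
    client = datacatalog_v1beta1.PolicyTagManagerClient(transport="grpc")
    print(client.transport.kind)  # -> "grpc"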
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO -from .grpc import PolicyTagManagerGrpcTransport - - -class PolicyTagManagerGrpcAsyncIOTransport(PolicyTagManagerTransport): - """gRPC AsyncIO backend transport for PolicyTagManager. - - The policy tag manager API service allows clients to manage - their taxonomies and policy tags. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_taxonomy(self) -> Callable[ - [policytagmanager.CreateTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the create taxonomy method over gRPC. - - Creates a taxonomy in the specified project. - - Returns: - Callable[[~.CreateTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_taxonomy' not in self._stubs: - self._stubs['create_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreateTaxonomy', - request_serializer=policytagmanager.CreateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['create_taxonomy'] - - @property - def delete_taxonomy(self) -> Callable[ - [policytagmanager.DeleteTaxonomyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete taxonomy method over gRPC. 
- - Deletes a taxonomy. This operation will also delete - all policy tags in this taxonomy along with their - associated policies. - - Returns: - Callable[[~.DeleteTaxonomyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_taxonomy' not in self._stubs: - self._stubs['delete_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeleteTaxonomy', - request_serializer=policytagmanager.DeleteTaxonomyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_taxonomy'] - - @property - def update_taxonomy(self) -> Callable[ - [policytagmanager.UpdateTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the update taxonomy method over gRPC. - - Updates a taxonomy. - - Returns: - Callable[[~.UpdateTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_taxonomy' not in self._stubs: - self._stubs['update_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdateTaxonomy', - request_serializer=policytagmanager.UpdateTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['update_taxonomy'] - - @property - def list_taxonomies(self) -> Callable[ - [policytagmanager.ListTaxonomiesRequest], - Awaitable[policytagmanager.ListTaxonomiesResponse]]: - r"""Return a callable for the list taxonomies method over gRPC. - - Lists all taxonomies in a project in a particular - location that the caller has permission to view. - - Returns: - Callable[[~.ListTaxonomiesRequest], - Awaitable[~.ListTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_taxonomies' not in self._stubs: - self._stubs['list_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListTaxonomies', - request_serializer=policytagmanager.ListTaxonomiesRequest.serialize, - response_deserializer=policytagmanager.ListTaxonomiesResponse.deserialize, - ) - return self._stubs['list_taxonomies'] - - @property - def get_taxonomy(self) -> Callable[ - [policytagmanager.GetTaxonomyRequest], - Awaitable[policytagmanager.Taxonomy]]: - r"""Return a callable for the get taxonomy method over gRPC. - - Gets a taxonomy. - - Returns: - Callable[[~.GetTaxonomyRequest], - Awaitable[~.Taxonomy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_taxonomy' not in self._stubs: - self._stubs['get_taxonomy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetTaxonomy', - request_serializer=policytagmanager.GetTaxonomyRequest.serialize, - response_deserializer=policytagmanager.Taxonomy.deserialize, - ) - return self._stubs['get_taxonomy'] - - @property - def create_policy_tag(self) -> Callable[ - [policytagmanager.CreatePolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the create policy tag method over gRPC. - - Creates a policy tag in the specified taxonomy. - - Returns: - Callable[[~.CreatePolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_policy_tag' not in self._stubs: - self._stubs['create_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/CreatePolicyTag', - request_serializer=policytagmanager.CreatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['create_policy_tag'] - - @property - def delete_policy_tag(self) -> Callable[ - [policytagmanager.DeletePolicyTagRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete policy tag method over gRPC. - - Deletes a policy tag. Also deletes all of its - descendant policy tags. - - Returns: - Callable[[~.DeletePolicyTagRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_policy_tag' not in self._stubs: - self._stubs['delete_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/DeletePolicyTag', - request_serializer=policytagmanager.DeletePolicyTagRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_policy_tag'] - - @property - def update_policy_tag(self) -> Callable[ - [policytagmanager.UpdatePolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the update policy tag method over gRPC. - - Updates a policy tag. - - Returns: - Callable[[~.UpdatePolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_policy_tag' not in self._stubs: - self._stubs['update_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/UpdatePolicyTag', - request_serializer=policytagmanager.UpdatePolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['update_policy_tag'] - - @property - def list_policy_tags(self) -> Callable[ - [policytagmanager.ListPolicyTagsRequest], - Awaitable[policytagmanager.ListPolicyTagsResponse]]: - r"""Return a callable for the list policy tags method over gRPC. - - Lists all policy tags in a taxonomy. 
- - Returns: - Callable[[~.ListPolicyTagsRequest], - Awaitable[~.ListPolicyTagsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_policy_tags' not in self._stubs: - self._stubs['list_policy_tags'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/ListPolicyTags', - request_serializer=policytagmanager.ListPolicyTagsRequest.serialize, - response_deserializer=policytagmanager.ListPolicyTagsResponse.deserialize, - ) - return self._stubs['list_policy_tags'] - - @property - def get_policy_tag(self) -> Callable[ - [policytagmanager.GetPolicyTagRequest], - Awaitable[policytagmanager.PolicyTag]]: - r"""Return a callable for the get policy tag method over gRPC. - - Gets a policy tag. - - Returns: - Callable[[~.GetPolicyTagRequest], - Awaitable[~.PolicyTag]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_policy_tag' not in self._stubs: - self._stubs['get_policy_tag'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetPolicyTag', - request_serializer=policytagmanager.GetPolicyTagRequest.serialize, - response_deserializer=policytagmanager.PolicyTag.deserialize, - ) - return self._stubs['get_policy_tag'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for a taxonomy or a policy tag. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for a taxonomy or a policy tag. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the permissions that a caller has on the - specified taxonomy or policy tag. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManager/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'PolicyTagManagerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py deleted file mode 100644 index 0592b8ffb549..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
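# A minimal, self-contained sketch (not part of the generated files above) of the
# lazy stub-caching pattern the PolicyTagManagerGrpcAsyncIOTransport uses: each RPC
# property builds its grpc unary-unary callable once, stores it in ``self._stubs``,
# and returns the cached callable afterwards. The channel object, stub name, and
# RPC path passed in are placeholders for illustration only.
from typing import Callable, Dict


class LazyStubCache:
    """Create each gRPC stub on first use and reuse it on every later call."""

    def __init__(self, channel) -> None:
        self._channel = channel
        self._stubs: Dict[str, Callable] = {}

    def get(self, name: str, path: str, request_serializer, response_deserializer) -> Callable:
        # Only build the unary-unary callable the first time it is requested.
        if name not in self._stubs:
            self._stubs[name] = self._channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]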
-# -from .client import PolicyTagManagerSerializationClient -from .async_client import PolicyTagManagerSerializationAsyncClient - -__all__ = ( - 'PolicyTagManagerSerializationClient', - 'PolicyTagManagerSerializationAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py deleted file mode 100644 index e84999231ee4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py +++ /dev/null @@ -1,380 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport -from .client import PolicyTagManagerSerializationClient - - -class PolicyTagManagerSerializationAsyncClient: - """Policy tag manager serialization API service allows clients - to manipulate their taxonomies and policy tags data with - serialized format. 
- """ - - _client: PolicyTagManagerSerializationClient - - DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT - - taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path) - parse_taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.parse_taxonomy_path) - common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PolicyTagManagerSerializationClient.common_folder_path) - parse_common_folder_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_folder_path) - common_organization_path = staticmethod(PolicyTagManagerSerializationClient.common_organization_path) - parse_common_organization_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_organization_path) - common_project_path = staticmethod(PolicyTagManagerSerializationClient.common_project_path) - parse_common_project_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_project_path) - common_location_path = staticmethod(PolicyTagManagerSerializationClient.common_location_path) - parse_common_location_path = staticmethod(PolicyTagManagerSerializationClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationAsyncClient: The constructed client. - """ - return PolicyTagManagerSerializationClient.from_service_account_info.__func__(PolicyTagManagerSerializationAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationAsyncClient: The constructed client. - """ - return PolicyTagManagerSerializationClient.from_service_account_file.__func__(PolicyTagManagerSerializationAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PolicyTagManagerSerializationClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PolicyTagManagerSerializationTransport: - """Returns the transport used by the client instance. - - Returns: - PolicyTagManagerSerializationTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(PolicyTagManagerSerializationClient).get_transport_class, type(PolicyTagManagerSerializationClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager serialization client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = PolicyTagManagerSerializationClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def import_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ImportTaxonomiesResponse: - r"""Imports all taxonomies and their policy tags to a - project as new taxonomies. - - This method provides a bulk taxonomy / policy tag - creation using nested proto structure. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1beta1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1beta1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = await client.import_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest, dict]]): - The request object. Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse: - Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ImportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def export_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ExportTaxonomiesResponse: - r"""Exports all taxonomies and their policy tags in a - project. - This method generates SerializedTaxonomy protos with - nested policy tags that can be used as an input for - future ImportTaxonomies calls. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - async def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = await client.export_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest, dict]]): - The request object. Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse: - Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - request = policytagmanagerserialization.ExportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_taxonomies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "PolicyTagManagerSerializationAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerSerializationAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py deleted file mode 100644 index 1ca908e0b535..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ /dev/null @@ -1,590 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PolicyTagManagerSerializationGrpcTransport -from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport - - -class PolicyTagManagerSerializationClientMeta(type): - """Metaclass for the PolicyTagManagerSerialization client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] - _transport_registry["grpc"] = PolicyTagManagerSerializationGrpcTransport - _transport_registry["grpc_asyncio"] = PolicyTagManagerSerializationGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[PolicyTagManagerSerializationTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PolicyTagManagerSerializationClient(metaclass=PolicyTagManagerSerializationClientMeta): - """Policy tag manager serialization API service allows clients - to manipulate their taxonomies and policy tags data with - serialized format. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "datacatalog.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PolicyTagManagerSerializationClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PolicyTagManagerSerializationTransport: - """Returns the transport used by the client instance. 
- - Returns: - PolicyTagManagerSerializationTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def taxonomy_path(project: str,location: str,taxonomy: str,) -> str: - """Returns a fully-qualified taxonomy string.""" - return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - - @staticmethod - def parse_taxonomy_path(path: str) -> Dict[str,str]: - """Parses a taxonomy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/taxonomies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PolicyTagManagerSerializationTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the policy tag manager serialization client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, PolicyTagManagerSerializationTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, PolicyTagManagerSerializationTransport): - # transport is a PolicyTagManagerSerializationTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def import_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ImportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ImportTaxonomiesResponse: - r"""Imports all taxonomies and their policy tags to a - project as new taxonomies. - - This method provides a bulk taxonomy / policy tag - creation using nested proto structure. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1beta1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1beta1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = client.import_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest, dict]): - The request object. Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse: - Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanagerserialization.ImportTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanagerserialization.ImportTaxonomiesRequest): - request = policytagmanagerserialization.ImportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def export_taxonomies(self, - request: Optional[Union[policytagmanagerserialization.ExportTaxonomiesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policytagmanagerserialization.ExportTaxonomiesResponse: - r"""Exports all taxonomies and their policy tags in a - project. - This method generates SerializedTaxonomy protos with - nested policy tags that can be used as an input for - future ImportTaxonomies calls. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import datacatalog_v1beta1 - - def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = client.export_taxonomies(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest, dict]): - The request object. Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse: - Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a policytagmanagerserialization.ExportTaxonomiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, policytagmanagerserialization.ExportTaxonomiesRequest): - request = policytagmanagerserialization.ExportTaxonomiesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_taxonomies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PolicyTagManagerSerializationClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PolicyTagManagerSerializationClient", -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py deleted file mode 100644 index faf2990e5837..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PolicyTagManagerSerializationTransport -from .grpc import PolicyTagManagerSerializationGrpcTransport -from .grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PolicyTagManagerSerializationTransport]] -_transport_registry['grpc'] = PolicyTagManagerSerializationGrpcTransport -_transport_registry['grpc_asyncio'] = PolicyTagManagerSerializationGrpcAsyncIOTransport - -__all__ = ( - 'PolicyTagManagerSerializationTransport', - 'PolicyTagManagerSerializationGrpcTransport', - 'PolicyTagManagerSerializationGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py deleted file mode 100644 index 650cfc4dd3d0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
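# A minimal usage sketch for the synchronous PolicyTagManagerSerializationClient
# defined above, assuming Application Default Credentials; it mirrors the generated
# docstring samples. The project id, location ("us"), and taxonomy names are
# placeholders, not values taken from this patch.
from google.cloud import datacatalog_v1beta1


def copy_taxonomies_within_project(project_id: str, taxonomy_names: list) -> None:
    client = datacatalog_v1beta1.PolicyTagManagerSerializationClient()
    parent = f"projects/{project_id}/locations/us"

    # Export the named taxonomies as SerializedTaxonomy protos.
    export_response = client.export_taxonomies(
        request=datacatalog_v1beta1.ExportTaxonomiesRequest(
            parent=parent,
            taxonomies=taxonomy_names,
            serialized_taxonomies=True,
        )
    )

    # Re-import the serialized taxonomies into the same project as new taxonomies.
    import_response = client.import_taxonomies(
        request=datacatalog_v1beta1.ImportTaxonomiesRequest(
            parent=parent,
            inline_source=datacatalog_v1beta1.InlineSource(
                taxonomies=list(export_response.taxonomies),
            ),
        )
    )
    for taxonomy in import_response.taxonomies:
        print(taxonomy.display_name)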
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.datacatalog_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PolicyTagManagerSerializationTransport(abc.ABC): - """Abstract transport class for PolicyTagManagerSerialization.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'datacatalog.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.import_taxonomies: gapic_v1.method.wrap_method( - self.import_taxonomies, - default_timeout=None, - client_info=client_info, - ), - self.export_taxonomies: gapic_v1.method.wrap_method( - self.export_taxonomies, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - Union[ - policytagmanagerserialization.ImportTaxonomiesResponse, - Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - Union[ - policytagmanagerserialization.ExportTaxonomiesResponse, - Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PolicyTagManagerSerializationTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py deleted file mode 100644 index 17c718d72ec0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
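The abstract transport above spells out how credentials are resolved: credentials and credentials_file are mutually exclusive, scopes fall back to AUTH_SCOPES, and the host defaults to datacatalog.googleapis.com:443. A hedged sketch of constructing the concrete gRPC transport explicitly and handing it to the client follows; the key path and quota project are placeholders, and an explicit transport is only needed when the client defaults are not sufficient.

from google.cloud import datacatalog_v1beta1
from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import (
    transports,
)

# credentials= and credentials_file= are mutually exclusive; passing both
# raises DuplicateCredentialArgs, as documented in the base transport above.
transport = transports.PolicyTagManagerSerializationGrpcTransport(
    host="datacatalog.googleapis.com",
    credentials_file="service-account.json",  # placeholder path
    quota_project_id="my-billing-project",    # placeholder project
)
client = datacatalog_v1beta1.PolicyTagManagerSerializationClient(transport=transport)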
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO - - -class PolicyTagManagerSerializationGrpcTransport(PolicyTagManagerSerializationTransport): - """gRPC backend transport for PolicyTagManagerSerialization. - - Policy tag manager serialization API service allows clients - to manipulate their taxonomies and policy tags data with - serialized format. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - policytagmanagerserialization.ImportTaxonomiesResponse]: - r"""Return a callable for the import taxonomies method over gRPC. - - Imports all taxonomies and their policy tags to a - project as new taxonomies. - - This method provides a bulk taxonomy / policy tag - creation using nested proto structure. - - Returns: - Callable[[~.ImportTaxonomiesRequest], - ~.ImportTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_taxonomies' not in self._stubs: - self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ImportTaxonomies', - request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, - ) - return self._stubs['import_taxonomies'] - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - policytagmanagerserialization.ExportTaxonomiesResponse]: - r"""Return a callable for the export taxonomies method over gRPC. - - Exports all taxonomies and their policy tags in a - project. - This method generates SerializedTaxonomy protos with - nested policy tags that can be used as an input for - future ImportTaxonomies calls. - - Returns: - Callable[[~.ExportTaxonomiesRequest], - ~.ExportTaxonomiesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'export_taxonomies' not in self._stubs: - self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ExportTaxonomies', - request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, - ) - return self._stubs['export_taxonomies'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'PolicyTagManagerSerializationGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py deleted file mode 100644 index e2ca1f84ad53..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py +++ /dev/null @@ -1,302 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO -from .grpc import PolicyTagManagerSerializationGrpcTransport - - -class PolicyTagManagerSerializationGrpcAsyncIOTransport(PolicyTagManagerSerializationTransport): - """gRPC AsyncIO backend transport for PolicyTagManagerSerialization. - - Policy tag manager serialization API service allows clients - to manipulate their taxonomies and policy tags data with - serialized format. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
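The synchronous transport above ignores credentials, credentials_file and scopes whenever an explicit channel is passed. A hedged sketch of that path, reusing the transport's own create_channel helper with Application Default Credentials; the quota project is a placeholder.

from google.cloud import datacatalog_v1beta1
from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports import (
    PolicyTagManagerSerializationGrpcTransport,
)

# Build the channel up front (for example to reuse it or tune gRPC options),
# then hand it to the transport, which will then skip creating its own channel.
channel = PolicyTagManagerSerializationGrpcTransport.create_channel(
    "datacatalog.googleapis.com",
    quota_project_id="my-billing-project",  # placeholder
)
transport = PolicyTagManagerSerializationGrpcTransport(channel=channel)
client = datacatalog_v1beta1.PolicyTagManagerSerializationClient(transport=transport)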
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'datacatalog.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def import_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ImportTaxonomiesRequest], - Awaitable[policytagmanagerserialization.ImportTaxonomiesResponse]]: - r"""Return a callable for the import taxonomies method over gRPC. - - Imports all taxonomies and their policy tags to a - project as new taxonomies. - - This method provides a bulk taxonomy / policy tag - creation using nested proto structure. - - Returns: - Callable[[~.ImportTaxonomiesRequest], - Awaitable[~.ImportTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_taxonomies' not in self._stubs: - self._stubs['import_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ImportTaxonomies', - request_serializer=policytagmanagerserialization.ImportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ImportTaxonomiesResponse.deserialize, - ) - return self._stubs['import_taxonomies'] - - @property - def export_taxonomies(self) -> Callable[ - [policytagmanagerserialization.ExportTaxonomiesRequest], - Awaitable[policytagmanagerserialization.ExportTaxonomiesResponse]]: - r"""Return a callable for the export taxonomies method over gRPC. - - Exports all taxonomies and their policy tags in a - project. - This method generates SerializedTaxonomy protos with - nested policy tags that can be used as an input for - future ImportTaxonomies calls. - - Returns: - Callable[[~.ExportTaxonomiesRequest], - Awaitable[~.ExportTaxonomiesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'export_taxonomies' not in self._stubs: - self._stubs['export_taxonomies'] = self.grpc_channel.unary_unary( - '/google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization/ExportTaxonomies', - request_serializer=policytagmanagerserialization.ExportTaxonomiesRequest.serialize, - response_deserializer=policytagmanagerserialization.ExportTaxonomiesResponse.deserialize, - ) - return self._stubs['export_taxonomies'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'PolicyTagManagerSerializationGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py deleted file mode 100644 index c895c656f715..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/__init__.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .datacatalog import ( - CreateEntryGroupRequest, - CreateEntryRequest, - CreateTagRequest, - CreateTagTemplateFieldRequest, - CreateTagTemplateRequest, - DeleteEntryGroupRequest, - DeleteEntryRequest, - DeleteTagRequest, - DeleteTagTemplateFieldRequest, - DeleteTagTemplateRequest, - Entry, - EntryGroup, - GetEntryGroupRequest, - GetEntryRequest, - GetTagTemplateRequest, - ListEntriesRequest, - ListEntriesResponse, - ListEntryGroupsRequest, - ListEntryGroupsResponse, - ListTagsRequest, - ListTagsResponse, - LookupEntryRequest, - RenameTagTemplateFieldEnumValueRequest, - RenameTagTemplateFieldRequest, - SearchCatalogRequest, - SearchCatalogResponse, - UpdateEntryGroupRequest, - UpdateEntryRequest, - UpdateTagRequest, - UpdateTagTemplateFieldRequest, - UpdateTagTemplateRequest, - EntryType, -) -from .gcs_fileset_spec import ( - GcsFilesetSpec, - GcsFileSpec, -) -from .policytagmanager import ( - CreatePolicyTagRequest, - CreateTaxonomyRequest, - DeletePolicyTagRequest, - DeleteTaxonomyRequest, - GetPolicyTagRequest, - GetTaxonomyRequest, - ListPolicyTagsRequest, - ListPolicyTagsResponse, - ListTaxonomiesRequest, - ListTaxonomiesResponse, - PolicyTag, - Taxonomy, - UpdatePolicyTagRequest, - UpdateTaxonomyRequest, -) -from .policytagmanagerserialization import ( - ExportTaxonomiesRequest, - ExportTaxonomiesResponse, - ImportTaxonomiesRequest, - ImportTaxonomiesResponse, - InlineSource, - SerializedPolicyTag, - SerializedTaxonomy, -) -from .schema import ( - ColumnSchema, - Schema, -) -from .search import ( - SearchCatalogResult, - SearchResultType, -) -from .table_spec import ( - BigQueryDateShardedSpec, - BigQueryTableSpec, - TableSpec, - ViewSpec, - TableSourceType, -) -from .tags import ( - FieldType, - Tag, - TagField, - TagTemplate, - TagTemplateField, -) -from .timestamps import ( - SystemTimestamps, -) -from .usage import ( - UsageSignal, - UsageStats, -) - -__all__ = ( - 'IntegratedSystem', - 'ManagingSystem', - 
'CreateEntryGroupRequest', - 'CreateEntryRequest', - 'CreateTagRequest', - 'CreateTagTemplateFieldRequest', - 'CreateTagTemplateRequest', - 'DeleteEntryGroupRequest', - 'DeleteEntryRequest', - 'DeleteTagRequest', - 'DeleteTagTemplateFieldRequest', - 'DeleteTagTemplateRequest', - 'Entry', - 'EntryGroup', - 'GetEntryGroupRequest', - 'GetEntryRequest', - 'GetTagTemplateRequest', - 'ListEntriesRequest', - 'ListEntriesResponse', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'ListTagsRequest', - 'ListTagsResponse', - 'LookupEntryRequest', - 'RenameTagTemplateFieldEnumValueRequest', - 'RenameTagTemplateFieldRequest', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'UpdateEntryGroupRequest', - 'UpdateEntryRequest', - 'UpdateTagRequest', - 'UpdateTagTemplateFieldRequest', - 'UpdateTagTemplateRequest', - 'EntryType', - 'GcsFilesetSpec', - 'GcsFileSpec', - 'CreatePolicyTagRequest', - 'CreateTaxonomyRequest', - 'DeletePolicyTagRequest', - 'DeleteTaxonomyRequest', - 'GetPolicyTagRequest', - 'GetTaxonomyRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'PolicyTag', - 'Taxonomy', - 'UpdatePolicyTagRequest', - 'UpdateTaxonomyRequest', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - 'ImportTaxonomiesRequest', - 'ImportTaxonomiesResponse', - 'InlineSource', - 'SerializedPolicyTag', - 'SerializedTaxonomy', - 'ColumnSchema', - 'Schema', - 'SearchCatalogResult', - 'SearchResultType', - 'BigQueryDateShardedSpec', - 'BigQueryTableSpec', - 'TableSpec', - 'ViewSpec', - 'TableSourceType', - 'FieldType', - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'SystemTimestamps', - 'UsageSignal', - 'UsageStats', -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py deleted file mode 100644 index 2ba43f9f577f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/common.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'IntegratedSystem', - 'ManagingSystem', - }, -) - - -class IntegratedSystem(proto.Enum): - r"""This enum describes all the possible systems that Data - Catalog integrates with. - - Values: - INTEGRATED_SYSTEM_UNSPECIFIED (0): - Default unknown system. - BIGQUERY (1): - BigQuery. - CLOUD_PUBSUB (2): - Cloud Pub/Sub. - """ - INTEGRATED_SYSTEM_UNSPECIFIED = 0 - BIGQUERY = 1 - CLOUD_PUBSUB = 2 - - -class ManagingSystem(proto.Enum): - r"""This enum describes all the systems that manage - Taxonomy and PolicyTag resources in DataCatalog. 
- - Values: - MANAGING_SYSTEM_UNSPECIFIED (0): - Default value - MANAGING_SYSTEM_DATAPLEX (1): - Dataplex. - MANAGING_SYSTEM_OTHER (2): - Other - """ - MANAGING_SYSTEM_UNSPECIFIED = 0 - MANAGING_SYSTEM_DATAPLEX = 1 - MANAGING_SYSTEM_OTHER = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py deleted file mode 100644 index 28f0c21feae7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/datacatalog.py +++ /dev/null @@ -1,1363 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec as gcd_gcs_fileset_spec -from google.cloud.datacatalog_v1beta1.types import schema as gcd_schema -from google.cloud.datacatalog_v1beta1.types import search -from google.cloud.datacatalog_v1beta1.types import table_spec -from google.cloud.datacatalog_v1beta1.types import tags as gcd_tags -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.cloud.datacatalog_v1beta1.types import usage -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'EntryType', - 'SearchCatalogRequest', - 'SearchCatalogResponse', - 'CreateEntryGroupRequest', - 'UpdateEntryGroupRequest', - 'GetEntryGroupRequest', - 'DeleteEntryGroupRequest', - 'ListEntryGroupsRequest', - 'ListEntryGroupsResponse', - 'CreateEntryRequest', - 'UpdateEntryRequest', - 'DeleteEntryRequest', - 'GetEntryRequest', - 'LookupEntryRequest', - 'Entry', - 'EntryGroup', - 'CreateTagTemplateRequest', - 'GetTagTemplateRequest', - 'UpdateTagTemplateRequest', - 'DeleteTagTemplateRequest', - 'CreateTagRequest', - 'UpdateTagRequest', - 'DeleteTagRequest', - 'CreateTagTemplateFieldRequest', - 'UpdateTagTemplateFieldRequest', - 'RenameTagTemplateFieldRequest', - 'RenameTagTemplateFieldEnumValueRequest', - 'DeleteTagTemplateFieldRequest', - 'ListTagsRequest', - 'ListTagsResponse', - 'ListEntriesRequest', - 'ListEntriesResponse', - }, -) - - -class EntryType(proto.Enum): - r"""Entry resources in Data Catalog can be of different types e.g. a - BigQuery Table entry is of type ``TABLE``. This enum describes all - the possible types Data Catalog contains. - - Values: - ENTRY_TYPE_UNSPECIFIED (0): - Default unknown type. - TABLE (2): - Output only. The type of entry that has a - GoogleSQL schema, including logical views. - MODEL (5): - Output only. The type of models. - https://cloud.google.com/bigquery-ml/docs/bigqueryml-intro - DATA_STREAM (3): - Output only. 
An entry type which is used for - streaming entries. Example: Pub/Sub topic. - FILESET (4): - An entry type which is a set of files or - objects. Example: Cloud Storage fileset. - """ - ENTRY_TYPE_UNSPECIFIED = 0 - TABLE = 2 - MODEL = 5 - DATA_STREAM = 3 - FILESET = 4 - - -class SearchCatalogRequest(proto.Message): - r"""Request message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - - Attributes: - scope (google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope): - Required. The scope of this search request. A ``scope`` that - has empty ``include_org_ids``, ``include_project_ids`` AND - false ``include_gcp_public_datasets`` is considered invalid. - Data Catalog will return an error in such a case. - query (str): - Optional. The query string in search query syntax. An empty - query string will result in all data assets (in the - specified scope) that the user has access to. Query strings - can be simple as "x" or more qualified as: - - - name:x - - column:x - - description:y - - Note: Query tokens need to have a minimum of 3 characters - for substring matching to work correctly. See `Data Catalog - Search - Syntax `__ - for more information. - page_size (int): - Number of results in the search page. If <=0 then defaults - to 10. Max limit for page_size is 1000. Throws an invalid - argument for page_size > 1000. - page_token (str): - Optional. Pagination token returned in an earlier - [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.SearchCatalogResponse.next_page_token], - which indicates that this is a continuation of a prior - [SearchCatalogRequest][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] - call, and that the system should return the next page of - data. If empty, the first page is returned. - order_by (str): - Specifies the ordering of results, currently supported - case-sensitive choices are: - - - ``relevance``, only supports descending - - ``last_modified_timestamp [asc|desc]``, defaults to - descending if not specified - - ``default`` that can only be descending - - If not specified, defaults to ``relevance`` descending. - """ - - class Scope(proto.Message): - r"""The criteria that select the subspace used for query - matching. - - Attributes: - include_org_ids (MutableSequence[str]): - The list of organization IDs to search - within. To find your organization ID, follow - instructions in - https://cloud.google.com/resource-manager/docs/creating-managing-organization. - include_project_ids (MutableSequence[str]): - The list of project IDs to search within. To - learn more about the distinction between project - names/IDs/numbers, go to - https://cloud.google.com/docs/overview/#projects. - include_gcp_public_datasets (bool): - If ``true``, include Google Cloud public datasets in the - search results. Info on Google Cloud public datasets is - available at https://cloud.google.com/public-datasets/. By - default, Google Cloud public datasets are excluded. - restricted_locations (MutableSequence[str]): - Optional. The list of locations to search within. - - 1. If empty, search will be performed in all locations; - 2. If any of the locations are NOT in the valid locations - list, error will be returned; - 3. Otherwise, search only the given locations for matching - results. Typical usage is to leave this field empty. 
When - a location is unreachable as returned in the - ``SearchCatalogResponse.unreachable`` field, users can - repeat the search request with this parameter set to get - additional information on the error. - - Valid locations: - - - asia-east1 - - asia-east2 - - asia-northeast1 - - asia-northeast2 - - asia-northeast3 - - asia-south1 - - asia-southeast1 - - australia-southeast1 - - eu - - europe-north1 - - europe-west1 - - europe-west2 - - europe-west3 - - europe-west4 - - europe-west6 - - global - - northamerica-northeast1 - - southamerica-east1 - - us - - us-central1 - - us-east1 - - us-east4 - - us-west1 - - us-west2 - """ - - include_org_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - include_project_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - include_gcp_public_datasets: bool = proto.Field( - proto.BOOL, - number=7, - ) - restricted_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - - scope: Scope = proto.Field( - proto.MESSAGE, - number=6, - message=Scope, - ) - query: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class SearchCatalogResponse(proto.Message): - r"""Response message for - [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. - - Attributes: - results (MutableSequence[google.cloud.datacatalog_v1beta1.types.SearchCatalogResult]): - Search results. - total_size (int): - The approximate total number of entries - matched by the query. - next_page_token (str): - The token that can be used to retrieve the - next page of results. - unreachable (MutableSequence[str]): - Unreachable locations. Search result does not include data - from those locations. Users can get additional information - on the error by repeating the search request with a more - restrictive parameter -- setting the value for - ``SearchDataCatalogRequest.scope.restricted_locations``. - """ - - @property - def raw_page(self): - return self - - results: MutableSequence[search.SearchCatalogResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=search.SearchCatalogResult, - ) - total_size: int = proto.Field( - proto.INT32, - number=2, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class CreateEntryGroupRequest(proto.Message): - r"""Request message for - [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. - - Attributes: - parent (str): - Required. The name of the project this entry group is in. - Example: - - - projects/{project_id}/locations/{location} - - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. - entry_group_id (str): - Required. The id of the entry group to - create. The id must begin with a letter or - underscore, contain only English letters, - numbers and underscores, and be at most 64 - characters. - entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): - The entry group to create. Defaults to an - empty entry group. 
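A hedged sketch combining the two request messages documented above: CreateEntryGroupRequest needs a parent location and an entry_group_id, while SearchCatalogRequest rejects an empty scope, so the search below is restricted to a single project. The project and location values are placeholders, and the calls assume the v1beta1 DataCatalogClient generated elsewhere in this package.

from google.cloud import datacatalog_v1beta1

client = datacatalog_v1beta1.DataCatalogClient()

# Create an entry group under a project/location (placeholders).
entry_group = client.create_entry_group(
    parent="projects/my-project/locations/us-central1",
    entry_group_id="my_entry_group",
    entry_group=datacatalog_v1beta1.EntryGroup(display_name="My entry group"),
)

# Search the catalog; the scope must not be empty, so limit it to the project.
scope = datacatalog_v1beta1.SearchCatalogRequest.Scope(
    include_project_ids=["my-project"],
)
for result in client.search_catalog(scope=scope, query="name:my_entry_group"):
    print(result.relative_resource_name)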
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_group_id: str = proto.Field( - proto.STRING, - number=3, - ) - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=2, - message='EntryGroup', - ) - - -class UpdateEntryGroupRequest(proto.Message): - r"""Request message for - [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. - - Attributes: - entry_group (google.cloud.datacatalog_v1beta1.types.EntryGroup): - Required. The updated entry group. "name" - field must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on - an entry group. - If this parameter is absent or empty, all - modifiable fields are overwritten. If such - fields are non-required and omitted in the - request body, their values are emptied. - """ - - entry_group: 'EntryGroup' = proto.Field( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetEntryGroupRequest(proto.Message): - r"""Request message for - [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. - - Attributes: - name (str): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return. If not set or empty, - all fields are returned. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteEntryGroupRequest(proto.Message): - r"""Request message for - [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. - - Attributes: - name (str): - Required. The name of the entry group. For example, - ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. - force (bool): - Optional. If true, deletes all entries in the - entry group. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListEntryGroupsRequest(proto.Message): - r"""Request message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - - Attributes: - parent (str): - Required. The name of the location that contains the entry - groups, which can be provided in URL format. Example: - - - projects/{project_id}/locations/{location} - page_size (int): - Optional. The maximum number of items to return. Default is - 10. Max limit is 1000. Throws an invalid argument for - ``page_size > 1000``. - page_token (str): - Optional. Token that specifies which page is - requested. If empty, the first page is returned. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListEntryGroupsResponse(proto.Message): - r"""Response message for - [ListEntryGroups][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups]. - - Attributes: - entry_groups (MutableSequence[google.cloud.datacatalog_v1beta1.types.EntryGroup]): - EntryGroup details. - next_page_token (str): - Token to retrieve the next page of results. - It is set to empty if no items remain in - results. 
- """ - - @property - def raw_page(self): - return self - - entry_groups: MutableSequence['EntryGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='EntryGroup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateEntryRequest(proto.Message): - r"""Request message for - [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. - - Attributes: - parent (str): - Required. The name of the entry group this entry is in. - Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - Note that this Entry and its child resources may not - actually be stored in the location in this name. - entry_id (str): - Required. The id of the entry to create. - entry (google.cloud.datacatalog_v1beta1.types.Entry): - Required. The entry to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - entry_id: str = proto.Field( - proto.STRING, - number=3, - ) - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=2, - message='Entry', - ) - - -class UpdateEntryRequest(proto.Message): - r"""Request message for - [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. - - Attributes: - entry (google.cloud.datacatalog_v1beta1.types.Entry): - Required. The updated entry. The "name" field - must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on an entry. - - If this parameter is absent or empty, all modifiable fields - are overwritten. If such fields are non-required and omitted - in the request body, their values are emptied. - - The following fields are modifiable: - - - For entries with type ``DATA_STREAM``: - - - ``schema`` - - - For entries with type ``FILESET``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``gcs_fileset_spec`` - - ``gcs_fileset_spec.file_patterns`` - - - For entries with ``user_specified_type``: - - - ``schema`` - - ``display_name`` - - ``description`` - - ``user_specified_type`` - - ``user_specified_system`` - - ``linked_resource`` - - ``source_system_timestamps`` - """ - - entry: 'Entry' = proto.Field( - proto.MESSAGE, - number=1, - message='Entry', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteEntryRequest(proto.Message): - r"""Request message for - [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. - - Attributes: - name (str): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetEntryRequest(proto.Message): - r"""Request message for - [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. - - Attributes: - name (str): - Required. The name of the entry. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class LookupEntryRequest(proto.Message): - r"""Request message for - [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - linked_resource (str): - The full name of the Google Cloud Platform resource the Data - Catalog entry represents. See: - https://cloud.google.com/apis/design/resource_names#full_resource_name. - Full names are case-sensitive. - - Examples: - - - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId - - //pubsub.googleapis.com/projects/projectId/topics/topicId - - This field is a member of `oneof`_ ``target_name``. - sql_resource (str): - The SQL name of the entry. SQL names are case-sensitive. - - Examples: - - - ``pubsub.project_id.topic_id`` - - :literal:`pubsub.project_id.`topic.id.with.dots\`` - - ``bigquery.table.project_id.dataset_id.table_id`` - - ``bigquery.dataset.project_id.dataset_id`` - - ``datacatalog.entry.project_id.location_id.entry_group_id.entry_id`` - - ``*_id``\ s should satisfy the standard SQL rules for - identifiers. - https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. - - This field is a member of `oneof`_ ``target_name``. - """ - - linked_resource: str = proto.Field( - proto.STRING, - number=1, - oneof='target_name', - ) - sql_resource: str = proto.Field( - proto.STRING, - number=3, - oneof='target_name', - ) - - -class Entry(proto.Message): - r"""Entry Metadata. A Data Catalog Entry resource represents another - resource in Google Cloud Platform (such as a BigQuery dataset or a - Pub/Sub topic), or outside of Google Cloud Platform. Clients can use - the ``linked_resource`` field in the Entry resource to refer to the - original resource ID of the source system. - - An Entry resource contains resource details, such as its schema. An - Entry can also be used to attach flexible metadata, such as a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The Data Catalog resource name of the entry in - URL format. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - Note that this Entry and its child resources may not - actually be stored in the location in this name. - linked_resource (str): - The resource this metadata entry refers to. - - For Google Cloud Platform resources, ``linked_resource`` is - the `full name of the - resource `__. - For example, the ``linked_resource`` for a table resource - from BigQuery is: - - - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId - - Output only when Entry is of type in the EntryType enum. For - entries with user_specified_type, this field is optional and - defaults to an empty string. - type_ (google.cloud.datacatalog_v1beta1.types.EntryType): - The type of the entry. - Only used for Entries with types in the - EntryType enum. - - This field is a member of `oneof`_ ``entry_type``. - user_specified_type (str): - Entry type if it does not fit any of the input-allowed - values listed in ``EntryType`` enum above. When creating an - entry, users should check the enum values first, if nothing - matches the entry to be created, then provide a custom - value, for example "my_special_type". 
- ``user_specified_type`` strings must begin with a letter or - underscore and can only contain letters, numbers, and - underscores; are case insensitive; must be at least 1 - character and at most 64 characters long. - - Currently, only FILESET enum value is allowed. All other - entries created through Data Catalog must use - ``user_specified_type``. - - This field is a member of `oneof`_ ``entry_type``. - integrated_system (google.cloud.datacatalog_v1beta1.types.IntegratedSystem): - Output only. This field indicates the entry's - source system that Data Catalog integrates with, - such as BigQuery or Pub/Sub. - - This field is a member of `oneof`_ ``system``. - user_specified_system (str): - This field indicates the entry's source system that Data - Catalog does not integrate with. ``user_specified_system`` - strings must begin with a letter or underscore and can only - contain letters, numbers, and underscores; are case - insensitive; must be at least 1 character and at most 64 - characters long. - - This field is a member of `oneof`_ ``system``. - gcs_fileset_spec (google.cloud.datacatalog_v1beta1.types.GcsFilesetSpec): - Specification that applies to a Cloud Storage - fileset. This is only valid on entries of type - FILESET. - - This field is a member of `oneof`_ ``type_spec``. - bigquery_table_spec (google.cloud.datacatalog_v1beta1.types.BigQueryTableSpec): - Specification that applies to a BigQuery table. This is only - valid on entries of type ``TABLE``. - - This field is a member of `oneof`_ ``type_spec``. - bigquery_date_sharded_spec (google.cloud.datacatalog_v1beta1.types.BigQueryDateShardedSpec): - Specification for a group of BigQuery tables with name - pattern ``[prefix]YYYYMMDD``. Context: - https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. - - This field is a member of `oneof`_ ``type_spec``. - display_name (str): - Display information such as title and - description. A short name to identify the entry, - for example, "Analytics Data - Jan 2011". - Default value is an empty string. - description (str): - Entry description, which can consist of - several sentences or paragraphs that describe - entry contents. Default value is an empty - string. - schema (google.cloud.datacatalog_v1beta1.types.Schema): - Schema of the entry. An entry might not have - any schema attached to it. - source_system_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): - Output only. Timestamps about the underlying resource, not - about this Data Catalog entry. Output only when Entry is of - type in the EntryType enum. For entries with - user_specified_type, this field is optional and defaults to - an empty timestamp. - usage_signal (google.cloud.datacatalog_v1beta1.types.UsageSignal): - Output only. Statistics on the usage level of - the resource. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - linked_resource: str = proto.Field( - proto.STRING, - number=9, - ) - type_: 'EntryType' = proto.Field( - proto.ENUM, - number=2, - oneof='entry_type', - enum='EntryType', - ) - user_specified_type: str = proto.Field( - proto.STRING, - number=16, - oneof='entry_type', - ) - integrated_system: common.IntegratedSystem = proto.Field( - proto.ENUM, - number=17, - oneof='system', - enum=common.IntegratedSystem, - ) - user_specified_system: str = proto.Field( - proto.STRING, - number=18, - oneof='system', - ) - gcs_fileset_spec: gcd_gcs_fileset_spec.GcsFilesetSpec = proto.Field( - proto.MESSAGE, - number=6, - oneof='type_spec', - message=gcd_gcs_fileset_spec.GcsFilesetSpec, - ) - bigquery_table_spec: table_spec.BigQueryTableSpec = proto.Field( - proto.MESSAGE, - number=12, - oneof='type_spec', - message=table_spec.BigQueryTableSpec, - ) - bigquery_date_sharded_spec: table_spec.BigQueryDateShardedSpec = proto.Field( - proto.MESSAGE, - number=15, - oneof='type_spec', - message=table_spec.BigQueryDateShardedSpec, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - schema: gcd_schema.Schema = proto.Field( - proto.MESSAGE, - number=5, - message=gcd_schema.Schema, - ) - source_system_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=7, - message=timestamps.SystemTimestamps, - ) - usage_signal: usage.UsageSignal = proto.Field( - proto.MESSAGE, - number=13, - message=usage.UsageSignal, - ) - - -class EntryGroup(proto.Message): - r"""EntryGroup Metadata. An EntryGroup resource represents a logical - grouping of zero or more Data Catalog - [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. - - Attributes: - name (str): - The resource name of the entry group in URL format. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - Note that this EntryGroup and its child resources may not - actually be stored in the location in this name. - display_name (str): - A short name to identify the entry group, for - example, "analytics data - jan 2011". Default - value is an empty string. - description (str): - Entry group description, which can consist of - several sentences or paragraphs that describe - entry group contents. Default value is an empty - string. - data_catalog_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): - Output only. Timestamps about this - EntryGroup. Default value is empty timestamps. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - data_catalog_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=4, - message=timestamps.SystemTimestamps, - ) - - -class CreateTagTemplateRequest(proto.Message): - r"""Request message for - [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. - - Attributes: - parent (str): - Required. The name of the project and the template location - [region](https://cloud.google.com/data-catalog/docs/concepts/regions. - - Example: - - - projects/{project_id}/locations/us-central1 - tag_template_id (str): - Required. The id of the tag template to - create. - tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): - Required. The tag template to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_id: str = proto.Field( - proto.STRING, - number=3, - ) - tag_template: gcd_tags.TagTemplate = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.TagTemplate, - ) - - -class GetTagTemplateRequest(proto.Message): - r"""Request message for - [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. - - Attributes: - name (str): - Required. The name of the tag template. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTagTemplateRequest(proto.Message): - r"""Request message for - [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. - - Attributes: - tag_template (google.cloud.datacatalog_v1beta1.types.TagTemplate): - Required. The template to update. The "name" - field must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Names of fields whose values to overwrite on a tag template. - Currently, only ``display_name`` can be overwritten. - - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their values - are emptied. - """ - - tag_template: gcd_tags.TagTemplate = proto.Field( - proto.MESSAGE, - number=1, - message=gcd_tags.TagTemplate, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteTagTemplateRequest(proto.Message): - r"""Request message for - [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. - - Attributes: - name (str): - Required. The name of the tag template to delete. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - force (bool): - Required. Currently, this field must always be set to - ``true``. This confirms the deletion of any possible tags - using this template. ``force = false`` will be supported in - the future. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class CreateTagRequest(proto.Message): - r"""Request message for - [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. - - Attributes: - parent (str): - Required. The name of the resource to attach this tag to. - Tags can be attached to Entries. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - - Note that this Tag and its child resources may not actually - be stored in the location in this name. - tag (google.cloud.datacatalog_v1beta1.types.Tag): - Required. The tag to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag: gcd_tags.Tag = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.Tag, - ) - - -class UpdateTagRequest(proto.Message): - r"""Request message for - [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. - - Attributes: - tag (google.cloud.datacatalog_v1beta1.types.Tag): - Required. The updated tag. The "name" field - must be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Note: Currently, this parameter can only take ``"fields"`` - as value. - - Names of fields whose values to overwrite on a tag. - Currently, a tag has the only modifiable field with the name - ``fields``. 
- - In general, if this parameter is absent or empty, all - modifiable fields are overwritten. If such fields are - non-required and omitted in the request body, their values - are emptied. - """ - - tag: gcd_tags.Tag = proto.Field( - proto.MESSAGE, - number=1, - message=gcd_tags.Tag, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteTagRequest(proto.Message): - r"""Request message for - [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. - - Attributes: - name (str): - Required. The name of the tag to delete. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateTagTemplateFieldRequest(proto.Message): - r"""Request message for - [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField]. - - Attributes: - parent (str): - Required. The name of the project and the template location - `region `__. - - Example: - - - projects/{project_id}/locations/us-central1/tagTemplates/{tag_template_id} - tag_template_field_id (str): - Required. The ID of the tag template field to create. Field - ids can contain letters (both uppercase and lowercase), - numbers (0-9), underscores (_) and dashes (-). Field IDs - must be at least 1 character long and at most 128 characters - long. Field IDs must also be unique within their template. - tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): - Required. The tag template field to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_field_id: str = proto.Field( - proto.STRING, - number=2, - ) - tag_template_field: gcd_tags.TagTemplateField = proto.Field( - proto.MESSAGE, - number=3, - message=gcd_tags.TagTemplateField, - ) - - -class UpdateTagTemplateFieldRequest(proto.Message): - r"""Request message for - [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template field. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - tag_template_field (google.cloud.datacatalog_v1beta1.types.TagTemplateField): - Required. The template to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Names of fields whose values to overwrite on an - individual field of a tag template. The following fields are - modifiable: - - - ``display_name`` - - ``type.enum_type`` - - ``is_required`` - - If this parameter is absent or empty, all modifiable fields - are overwritten. If such fields are non-required and omitted - in the request body, their values are emptied with one - exception: when updating an enum type, the provided values - are merged with the existing values. Therefore, enum values - can only be added, existing enum values cannot be deleted or - renamed. - - Additionally, updating a template field from optional to - required is *not* allowed. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - tag_template_field: gcd_tags.TagTemplateField = proto.Field( - proto.MESSAGE, - number=2, - message=gcd_tags.TagTemplateField, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class RenameTagTemplateFieldRequest(proto.Message): - r"""Request message for - [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - new_tag_template_field_id (str): - Required. The new ID of this tag template field. For - example, ``my_new_field``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_tag_template_field_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RenameTagTemplateFieldEnumValueRequest(proto.Message): - r"""Request message for - [RenameTagTemplateFieldEnumValue][google.cloud.datacatalog.v1.DataCatalog.RenameTagTemplateFieldEnumValue]. - - Attributes: - name (str): - Required. The name of the enum field value. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name} - new_enum_value_display_name (str): - Required. The new display name of the enum value. For - example, ``my_new_enum_value``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_enum_value_display_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteTagTemplateFieldRequest(proto.Message): - r"""Request message for - [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. - - Attributes: - name (str): - Required. The name of the tag template field to delete. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} - force (bool): - Required. Currently, this field must always be set to - ``true``. This confirms the deletion of this field from any - tags using this field. ``force = false`` will be supported - in the future. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListTagsRequest(proto.Message): - r"""Request message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. - - Attributes: - parent (str): - Required. The name of the Data Catalog resource to list the - tags of. The resource could be an - [Entry][google.cloud.datacatalog.v1beta1.Entry] or an - [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. - - Examples: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} - page_size (int): - The maximum number of tags to return. Default - is 10. Max limit is 1000. - page_token (str): - Token that specifies which page is requested. - If empty, the first page is returned. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTagsResponse(proto.Message): - r"""Response message for - [ListTags][google.cloud.datacatalog.v1beta1.DataCatalog.ListTags]. 
- - Attributes: - tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.Tag]): - [Tag][google.cloud.datacatalog.v1beta1.Tag] details. - next_page_token (str): - Token to retrieve the next page of results. - It is set to empty if no items remain in - results. - """ - - @property - def raw_page(self): - return self - - tags: MutableSequence[gcd_tags.Tag] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcd_tags.Tag, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListEntriesRequest(proto.Message): - r"""Request message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - - Attributes: - parent (str): - Required. The name of the entry group that contains the - entries, which can be provided in URL format. Example: - - - projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} - page_size (int): - The maximum number of items to return. Default is 10. Max - limit is 1000. Throws an invalid argument for - ``page_size > 1000``. - page_token (str): - Token that specifies which page is requested. - If empty, the first page is returned. - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The fields to return for each Entry. If not set or empty, - all fields are returned. For example, setting read_mask to - contain only one path "name" will cause ListEntries to - return a list of Entries with only "name" field. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - - -class ListEntriesResponse(proto.Message): - r"""Response message for - [ListEntries][google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries]. - - Attributes: - entries (MutableSequence[google.cloud.datacatalog_v1beta1.types.Entry]): - Entry details. - next_page_token (str): - Token to retrieve the next page of results. - It is set to empty if no items remain in - results. - """ - - @property - def raw_page(self): - return self - - entries: MutableSequence['Entry'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entry', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py deleted file mode 100644 index 9918d07ef189..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/gcs_fileset_spec.py +++ /dev/null @@ -1,117 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1beta1.types import timestamps - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'GcsFilesetSpec', - 'GcsFileSpec', - }, -) - - -class GcsFilesetSpec(proto.Message): - r"""Describes a Cloud Storage fileset entry. - - Attributes: - file_patterns (MutableSequence[str]): - Required. Patterns to identify a set of files in Google - Cloud Storage. See `Cloud Storage - documentation `__ - for more information. Note that bucket wildcards are - currently not supported. - - Examples of valid file_patterns: - - - ``gs://bucket_name/dir/*``: matches all files within - ``bucket_name/dir`` directory. - - ``gs://bucket_name/dir/**``: matches all files in - ``bucket_name/dir`` spanning all subdirectories. - - ``gs://bucket_name/file*``: matches files prefixed by - ``file`` in ``bucket_name`` - - ``gs://bucket_name/??.txt``: matches files with two - characters followed by ``.txt`` in ``bucket_name`` - - ``gs://bucket_name/[aeiou].txt``: matches files that - contain a single vowel character followed by ``.txt`` in - ``bucket_name`` - - ``gs://bucket_name/[a-m].txt``: matches files that - contain ``a``, ``b``, ... or ``m`` followed by ``.txt`` - in ``bucket_name`` - - ``gs://bucket_name/a/*/b``: matches all files in - ``bucket_name`` that match ``a/*/b`` pattern, such as - ``a/c/b``, ``a/d/b`` - - ``gs://another_bucket/a.txt``: matches - ``gs://another_bucket/a.txt`` - - You can combine wildcards to provide more powerful matches, - for example: - - - ``gs://bucket_name/[a-m]??.j*g`` - sample_gcs_file_specs (MutableSequence[google.cloud.datacatalog_v1beta1.types.GcsFileSpec]): - Output only. Sample files contained in this - fileset, not all files contained in this fileset - are represented here. - """ - - file_patterns: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - sample_gcs_file_specs: MutableSequence['GcsFileSpec'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='GcsFileSpec', - ) - - -class GcsFileSpec(proto.Message): - r"""Specifications of a single file in Cloud Storage. - - Attributes: - file_path (str): - Required. The full file path. Example: - ``gs://bucket_name/a/b.txt``. - gcs_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): - Output only. Timestamps about the Cloud - Storage file. - size_bytes (int): - Output only. The size of the file, in bytes. - """ - - file_path: str = proto.Field( - proto.STRING, - number=1, - ) - gcs_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=2, - message=timestamps.SystemTimestamps, - ) - size_bytes: int = proto.Field( - proto.INT64, - number=4, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py deleted file mode 100644 index 112b99a8a486..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanager.py +++ /dev/null @@ -1,520 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'Taxonomy', - 'PolicyTag', - 'CreateTaxonomyRequest', - 'DeleteTaxonomyRequest', - 'UpdateTaxonomyRequest', - 'ListTaxonomiesRequest', - 'ListTaxonomiesResponse', - 'GetTaxonomyRequest', - 'CreatePolicyTagRequest', - 'DeletePolicyTagRequest', - 'UpdatePolicyTagRequest', - 'ListPolicyTagsRequest', - 'ListPolicyTagsResponse', - 'GetPolicyTagRequest', - }, -) - - -class Taxonomy(proto.Message): - r"""A taxonomy is a collection of policy tags that classify data along a - common axis. For instance a data *sensitivity* taxonomy could - contain policy tags denoting PII such as age, zipcode, and SSN. A - data *origin* taxonomy could contain policy tags to distinguish user - data, employee data, partner data, public data. - - Attributes: - name (str): - Output only. Resource name of this taxonomy, whose format - is: - "projects/{project_number}/locations/{location_id}/taxonomies/{id}". - display_name (str): - Required. User defined name of this taxonomy. - It must: contain only unicode letters, numbers, - underscores, dashes and spaces; not start or end - with spaces; and be at most 200 bytes long when - encoded in UTF-8. - - The taxonomy display name must be unique within - an organization. - description (str): - Optional. Description of this taxonomy. It - must: contain only unicode characters, tabs, - newlines, carriage returns and page breaks; and - be at most 2000 bytes long when encoded in - UTF-8. If not set, defaults to an empty - description. - policy_tag_count (int): - Output only. Number of policy tags contained - in this taxonomy. - taxonomy_timestamps (google.cloud.datacatalog_v1beta1.types.SystemTimestamps): - Output only. Timestamps about this taxonomy. Only - create_time and update_time are used. - activated_policy_types (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy.PolicyType]): - Optional. A list of policy types that are - activated for this taxonomy. If not set, - defaults to an empty list. - service (google.cloud.datacatalog_v1beta1.types.Taxonomy.Service): - Output only. Identity of the service which - owns the Taxonomy. This field is only populated - when the taxonomy is created by a Google Cloud - service. Currently only 'DATAPLEX' is supported. - """ - class PolicyType(proto.Enum): - r"""Defines policy types where policy tag can be used for. - - Values: - POLICY_TYPE_UNSPECIFIED (0): - Unspecified policy type. - FINE_GRAINED_ACCESS_CONTROL (1): - Fine grained access control policy, which - enables access control on tagged resources. - """ - POLICY_TYPE_UNSPECIFIED = 0 - FINE_GRAINED_ACCESS_CONTROL = 1 - - class Service(proto.Message): - r"""The source system of the Taxonomy. 
- - Attributes: - name (google.cloud.datacatalog_v1beta1.types.ManagingSystem): - The Google Cloud service name. - identity (str): - The service agent for the service. - """ - - name: common.ManagingSystem = proto.Field( - proto.ENUM, - number=1, - enum=common.ManagingSystem, - ) - identity: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - policy_tag_count: int = proto.Field( - proto.INT32, - number=4, - ) - taxonomy_timestamps: timestamps.SystemTimestamps = proto.Field( - proto.MESSAGE, - number=5, - message=timestamps.SystemTimestamps, - ) - activated_policy_types: MutableSequence[PolicyType] = proto.RepeatedField( - proto.ENUM, - number=6, - enum=PolicyType, - ) - service: Service = proto.Field( - proto.MESSAGE, - number=7, - message=Service, - ) - - -class PolicyTag(proto.Message): - r"""Denotes one policy tag in a taxonomy (e.g. ssn). Policy Tags - can be defined in a hierarchy. For example, consider the - following hierarchy: - - Geolocation -> (LatLong, City, ZipCode). PolicyTag - "Geolocation" contains three child policy tags: "LatLong", - "City", and "ZipCode". - - Attributes: - name (str): - Output only. Resource name of this policy tag, whose format - is: - "projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{id}". - display_name (str): - Required. User defined name of this policy - tag. It must: be unique within the parent - taxonomy; contain only unicode letters, numbers, - underscores, dashes and spaces; not start or end - with spaces; and be at most 200 bytes long when - encoded in UTF-8. - description (str): - Description of this policy tag. It must: - contain only unicode characters, tabs, newlines, - carriage returns and page breaks; and be at most - 2000 bytes long when encoded in UTF-8. If not - set, defaults to an empty description. If not - set, defaults to an empty description. - parent_policy_tag (str): - Resource name of this policy tag's parent - policy tag (e.g. for the "LatLong" policy tag in - the example above, this field contains the - resource name of the "Geolocation" policy tag). - If empty, it means this policy tag is a top - level policy tag (e.g. this field is empty for - the "Geolocation" policy tag in the example - above). If not set, defaults to an empty string. - child_policy_tags (MutableSequence[str]): - Output only. Resource names of child policy - tags of this policy tag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - parent_policy_tag: str = proto.Field( - proto.STRING, - number=4, - ) - child_policy_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class CreateTaxonomyRequest(proto.Message): - r"""Request message for - [CreateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy]. - - Attributes: - parent (str): - Required. Resource name of the project that - the taxonomy will belong to. - taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): - The taxonomy to be created. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - taxonomy: 'Taxonomy' = proto.Field( - proto.MESSAGE, - number=2, - message='Taxonomy', - ) - - -class DeleteTaxonomyRequest(proto.Message): - r"""Request message for - [DeleteTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy]. - - Attributes: - name (str): - Required. Resource name of the taxonomy to be - deleted. All policy tags in this taxonomy will - also be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTaxonomyRequest(proto.Message): - r"""Request message for - [UpdateTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy]. - - Attributes: - taxonomy (google.cloud.datacatalog_v1beta1.types.Taxonomy): - The taxonomy to update. Only description, display_name, and - activated policy types can be updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. For the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - taxonomy: 'Taxonomy' = proto.Field( - proto.MESSAGE, - number=1, - message='Taxonomy', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListTaxonomiesRequest(proto.Message): - r"""Request message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - - Attributes: - parent (str): - Required. Resource name of the project to - list the taxonomies of. - page_size (int): - The maximum number of items to return. Must - be a value between 1 and 1000. If not set, - defaults to 50. - page_token (str): - The next_page_token value returned from a previous list - request, if any. If not set, defaults to an empty string. - filter (str): - Supported field for filter is 'service' and - value is 'dataplex'. Eg: service=dataplex. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTaxonomiesResponse(proto.Message): - r"""Response message for - [ListTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies]. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy]): - Taxonomies that the project contains. - next_page_token (str): - Token used to retrieve the next page of - results, or empty if there are no more results - in the list. - """ - - @property - def raw_page(self): - return self - - taxonomies: MutableSequence['Taxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Taxonomy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTaxonomyRequest(proto.Message): - r"""Request message for - [GetTaxonomy][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy]. - - Attributes: - name (str): - Required. Resource name of the requested - taxonomy. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreatePolicyTagRequest(proto.Message): - r"""Request message for - [CreatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag]. - - Attributes: - parent (str): - Required. 
Resource name of the taxonomy that - the policy tag will belong to. - policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): - The policy tag to be created. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - policy_tag: 'PolicyTag' = proto.Field( - proto.MESSAGE, - number=2, - message='PolicyTag', - ) - - -class DeletePolicyTagRequest(proto.Message): - r"""Request message for - [DeletePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag]. - - Attributes: - name (str): - Required. Resource name of the policy tag to - be deleted. All of its descendant policy tags - will also be deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdatePolicyTagRequest(proto.Message): - r"""Request message for - [UpdatePolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag]. - - Attributes: - policy_tag (google.cloud.datacatalog_v1beta1.types.PolicyTag): - The policy tag to update. Only the description, - display_name, and parent_policy_tag fields can be updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. Only display_name, - description and parent_policy_tag can be updated and thus - can be listed in the mask. If update_mask is not provided, - all allowed fields (i.e. display_name, description and - parent) will be updated. For more information including the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - policy_tag: 'PolicyTag' = proto.Field( - proto.MESSAGE, - number=1, - message='PolicyTag', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListPolicyTagsRequest(proto.Message): - r"""Request message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - - Attributes: - parent (str): - Required. Resource name of the taxonomy to - list the policy tags of. - page_size (int): - The maximum number of items to return. Must - be a value between 1 and 1000. If not set, - defaults to 50. - page_token (str): - The next_page_token value returned from a previous List - request, if any. If not set, defaults to an empty string. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListPolicyTagsResponse(proto.Message): - r"""Response message for - [ListPolicyTags][google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags]. - - Attributes: - policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.PolicyTag]): - The policy tags that are in the requested - taxonomy. - next_page_token (str): - Token used to retrieve the next page of - results, or empty if there are no more results - in the list. - """ - - @property - def raw_page(self): - return self - - policy_tags: MutableSequence['PolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PolicyTag', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetPolicyTagRequest(proto.Message): - r"""Request message for - [GetPolicyTag][google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag]. - - Attributes: - name (str): - Required. Resource name of the requested - policy tag. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py deleted file mode 100644 index 2ea1f5bc52ab..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py +++ /dev/null @@ -1,234 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.datacatalog_v1beta1.types import policytagmanager - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'SerializedTaxonomy', - 'SerializedPolicyTag', - 'ImportTaxonomiesRequest', - 'InlineSource', - 'ImportTaxonomiesResponse', - 'ExportTaxonomiesRequest', - 'ExportTaxonomiesResponse', - }, -) - - -class SerializedTaxonomy(proto.Message): - r"""Message capturing a taxonomy and its policy tag hierarchy as - a nested proto. Used for taxonomy import/export and mutation. - - Attributes: - display_name (str): - Required. Display name of the taxonomy. Max - 200 bytes when encoded in UTF-8. - description (str): - Description of the serialized taxonomy. The - length of the description is limited to 2000 - bytes when encoded in UTF-8. If not set, - defaults to an empty description. - policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedPolicyTag]): - Top level policy tags associated with the - taxonomy if any. - activated_policy_types (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy.PolicyType]): - A list of policy types that are activated for - a taxonomy. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='SerializedPolicyTag', - ) - activated_policy_types: MutableSequence[policytagmanager.Taxonomy.PolicyType] = proto.RepeatedField( - proto.ENUM, - number=4, - enum=policytagmanager.Taxonomy.PolicyType, - ) - - -class SerializedPolicyTag(proto.Message): - r"""Message representing one policy tag when exported as a nested - proto. - - Attributes: - policy_tag (str): - Resource name of the policy tag. - - This field will be ignored when calling - ImportTaxonomies. - display_name (str): - Required. Display name of the policy tag. Max - 200 bytes when encoded in UTF-8. - description (str): - Description of the serialized policy tag. The - length of the description is limited to 2000 - bytes when encoded in UTF-8. If not set, - defaults to an empty description. 
- child_policy_tags (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedPolicyTag]): - Children of the policy tag if any. - """ - - policy_tag: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - child_policy_tags: MutableSequence['SerializedPolicyTag'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='SerializedPolicyTag', - ) - - -class ImportTaxonomiesRequest(proto.Message): - r"""Request message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Resource name of project that the - imported taxonomies will belong to. - inline_source (google.cloud.datacatalog_v1beta1.types.InlineSource): - Inline source used for taxonomies to be - imported. - - This field is a member of `oneof`_ ``source``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - inline_source: 'InlineSource' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='InlineSource', - ) - - -class InlineSource(proto.Message): - r"""Inline source used for taxonomies import. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedTaxonomy]): - Required. Taxonomies to be imported. - """ - - taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SerializedTaxonomy', - ) - - -class ImportTaxonomiesResponse(proto.Message): - r"""Response message for - [ImportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies]. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.Taxonomy]): - Taxonomies that were imported. - """ - - taxonomies: MutableSequence[policytagmanager.Taxonomy] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=policytagmanager.Taxonomy, - ) - - -class ExportTaxonomiesRequest(proto.Message): - r"""Request message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Resource name of the project that - taxonomies to be exported will share. - taxonomies (MutableSequence[str]): - Required. Resource names of the taxonomies to - be exported. - serialized_taxonomies (bool): - Export taxonomies as serialized taxonomies. - - This field is a member of `oneof`_ ``destination``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - taxonomies: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - serialized_taxonomies: bool = proto.Field( - proto.BOOL, - number=3, - oneof='destination', - ) - - -class ExportTaxonomiesResponse(proto.Message): - r"""Response message for - [ExportTaxonomies][google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies]. - - Attributes: - taxonomies (MutableSequence[google.cloud.datacatalog_v1beta1.types.SerializedTaxonomy]): - List of taxonomies and policy tags in a tree - structure. 
- """ - - taxonomies: MutableSequence['SerializedTaxonomy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SerializedTaxonomy', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py deleted file mode 100644 index 152d45255fc8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/schema.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'Schema', - 'ColumnSchema', - }, -) - - -class Schema(proto.Message): - r"""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). - - Attributes: - columns (MutableSequence[google.cloud.datacatalog_v1beta1.types.ColumnSchema]): - Required. Schema of columns. A maximum of - 10,000 columns and sub-columns can be specified. - """ - - columns: MutableSequence['ColumnSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ColumnSchema', - ) - - -class ColumnSchema(proto.Message): - r"""Representation of a column within a schema. Columns could be - nested inside other columns. - - Attributes: - column (str): - Required. Name of the column. - type_ (str): - Required. Type of the column. - description (str): - Optional. Description of the column. Default - value is an empty string. - mode (str): - Optional. A column's mode indicates whether the values in - this column are required, nullable, etc. Only ``NULLABLE``, - ``REQUIRED`` and ``REPEATED`` are supported. Default mode is - ``NULLABLE``. - subcolumns (MutableSequence[google.cloud.datacatalog_v1beta1.types.ColumnSchema]): - Optional. Schema of sub-columns. A column can - have zero or more sub-columns. 
- """ - - column: str = proto.Field( - proto.STRING, - number=6, - ) - type_: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - mode: str = proto.Field( - proto.STRING, - number=3, - ) - subcolumns: MutableSequence['ColumnSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ColumnSchema', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py deleted file mode 100644 index 46c4f17e7c4b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/search.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'SearchResultType', - 'SearchCatalogResult', - }, -) - - -class SearchResultType(proto.Enum): - r"""The different types of resources that can be returned in - search. - - Values: - SEARCH_RESULT_TYPE_UNSPECIFIED (0): - Default unknown type. - ENTRY (1): - An [Entry][google.cloud.datacatalog.v1beta1.Entry]. - TAG_TEMPLATE (2): - A - [TagTemplate][google.cloud.datacatalog.v1beta1.TagTemplate]. - ENTRY_GROUP (3): - An - [EntryGroup][google.cloud.datacatalog.v1beta1.EntryGroup]. - """ - SEARCH_RESULT_TYPE_UNSPECIFIED = 0 - ENTRY = 1 - TAG_TEMPLATE = 2 - ENTRY_GROUP = 3 - - -class SearchCatalogResult(proto.Message): - r"""A result that appears in the response of a search request. - Each result captures details of one entry that matches the - search. - - Attributes: - search_result_type (google.cloud.datacatalog_v1beta1.types.SearchResultType): - Type of the search result. This field can be - used to determine which Get method to call to - fetch the full resource. - search_result_subtype (str): - Sub-type of the search result. This is a dot-delimited - description of the resource's full type, and is the same as - the value callers would provide in the "type" search facet. - Examples: ``entry.table``, ``entry.dataStream``, - ``tagTemplate``. - relative_resource_name (str): - The relative resource name of the resource in URL format. - Examples: - - - ``projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` - - ``projects/{project_id}/tagTemplates/{tag_template_id}`` - linked_resource (str): - The full name of the cloud resource the entry belongs to. - See: - https://cloud.google.com/apis/design/resource_names#full_resource_name. 
- Example: - - - ``//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId`` - modify_time (google.protobuf.timestamp_pb2.Timestamp): - Last-modified timestamp of the entry from the - managing system. - """ - - search_result_type: 'SearchResultType' = proto.Field( - proto.ENUM, - number=1, - enum='SearchResultType', - ) - search_result_subtype: str = proto.Field( - proto.STRING, - number=2, - ) - relative_resource_name: str = proto.Field( - proto.STRING, - number=3, - ) - linked_resource: str = proto.Field( - proto.STRING, - number=4, - ) - modify_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py deleted file mode 100644 index 5eadf5fd9317..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/table_spec.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'TableSourceType', - 'BigQueryTableSpec', - 'ViewSpec', - 'TableSpec', - 'BigQueryDateShardedSpec', - }, -) - - -class TableSourceType(proto.Enum): - r"""Table source type. - - Values: - TABLE_SOURCE_TYPE_UNSPECIFIED (0): - Default unknown type. - BIGQUERY_VIEW (2): - Table view. - BIGQUERY_TABLE (5): - BigQuery native table. - BIGQUERY_MATERIALIZED_VIEW (7): - BigQuery materialized view. - """ - TABLE_SOURCE_TYPE_UNSPECIFIED = 0 - BIGQUERY_VIEW = 2 - BIGQUERY_TABLE = 5 - BIGQUERY_MATERIALIZED_VIEW = 7 - - -class BigQueryTableSpec(proto.Message): - r"""Describes a BigQuery table. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table_source_type (google.cloud.datacatalog_v1beta1.types.TableSourceType): - Output only. The table source type. - view_spec (google.cloud.datacatalog_v1beta1.types.ViewSpec): - Table view specification. This field should only be - populated if ``table_source_type`` is ``BIGQUERY_VIEW``. - - This field is a member of `oneof`_ ``type_spec``. - table_spec (google.cloud.datacatalog_v1beta1.types.TableSpec): - Spec of a BigQuery table. This field should only be - populated if ``table_source_type`` is ``BIGQUERY_TABLE``. - - This field is a member of `oneof`_ ``type_spec``. 
- """ - - table_source_type: 'TableSourceType' = proto.Field( - proto.ENUM, - number=1, - enum='TableSourceType', - ) - view_spec: 'ViewSpec' = proto.Field( - proto.MESSAGE, - number=2, - oneof='type_spec', - message='ViewSpec', - ) - table_spec: 'TableSpec' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type_spec', - message='TableSpec', - ) - - -class ViewSpec(proto.Message): - r"""Table view specification. - - Attributes: - view_query (str): - Output only. The query that defines the table - view. - """ - - view_query: str = proto.Field( - proto.STRING, - number=1, - ) - - -class TableSpec(proto.Message): - r"""Normal BigQuery table spec. - - Attributes: - grouped_entry (str): - Output only. If the table is a dated shard, i.e., with name - pattern ``[prefix]YYYYMMDD``, ``grouped_entry`` is the Data - Catalog resource name of the date sharded grouped entry, for - example, - ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``. - Otherwise, ``grouped_entry`` is empty. - """ - - grouped_entry: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryDateShardedSpec(proto.Message): - r"""Spec for a group of BigQuery tables with name pattern - ``[prefix]YYYYMMDD``. Context: - https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding - - Attributes: - dataset (str): - Output only. The Data Catalog resource name of the dataset - entry the current table belongs to, for example, - ``projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}``. - table_prefix (str): - Output only. The table name prefix of the shards. The name - of any given shard is ``[table_prefix]YYYYMMDD``, for - example, for shard ``MyTable20180101``, the ``table_prefix`` - is ``MyTable``. - shard_count (int): - Output only. Total number of shards. - """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - table_prefix: str = proto.Field( - proto.STRING, - number=2, - ) - shard_count: int = proto.Field( - proto.INT64, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py deleted file mode 100644 index b9d13daa19a4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/tags.py +++ /dev/null @@ -1,407 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'Tag', - 'TagField', - 'TagTemplate', - 'TagTemplateField', - 'FieldType', - }, -) - - -class Tag(proto.Message): - r"""Tags are used to attach custom metadata to Data Catalog resources. - Tags conform to the specifications within their tag template. - - See `Data Catalog - IAM `__ for - information on the permissions needed to create or view tags. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the tag in URL format. Example: - - - projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} - - where ``tag_id`` is a system-generated identifier. Note that - this Tag may not actually be stored in the location in this - name. - template (str): - Required. The resource name of the tag template that this - tag uses. Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - This field cannot be modified after creation. - template_display_name (str): - Output only. The display name of the tag - template. - column (str): - Resources like Entry can have schemas associated with them. - This scope allows users to attach tags to an individual - column based on that schema. - - For attaching a tag to a nested column, use ``.`` to - separate the column names. Example: - - - ``outer_column.inner_column`` - - This field is a member of `oneof`_ ``scope``. - fields (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.TagField]): - Required. This maps the ID of a tag field to - the value of and additional information about - that field. Valid field IDs are defined by the - tag's template. A tag must have at least 1 field - and at most 500 fields. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - template: str = proto.Field( - proto.STRING, - number=2, - ) - template_display_name: str = proto.Field( - proto.STRING, - number=5, - ) - column: str = proto.Field( - proto.STRING, - number=4, - oneof='scope', - ) - fields: MutableMapping[str, 'TagField'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='TagField', - ) - - -class TagField(proto.Message): - r"""Contains the value and supporting information for a field within a - [Tag][google.cloud.datacatalog.v1beta1.Tag]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - Output only. The display name of this field. - double_value (float): - Holds the value for a tag field with double - type. - - This field is a member of `oneof`_ ``kind``. - string_value (str): - Holds the value for a tag field with string - type. - - This field is a member of `oneof`_ ``kind``. - bool_value (bool): - Holds the value for a tag field with boolean - type. - - This field is a member of `oneof`_ ``kind``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - Holds the value for a tag field with - timestamp type. 
- - This field is a member of `oneof`_ ``kind``. - enum_value (google.cloud.datacatalog_v1beta1.types.TagField.EnumValue): - Holds the value for a tag field with enum - type. This value must be one of the allowed - values in the definition of this enum. - - This field is a member of `oneof`_ ``kind``. - order (int): - Output only. The order of this field with respect to other - fields in this tag. It can be set in - [Tag][google.cloud.datacatalog.v1beta1.TagTemplateField.order]. - For example, a higher value can indicate a more important - field. The value can be negative. Multiple fields can have - the same order, and field orders within a tag do not have to - be sequential. - """ - - class EnumValue(proto.Message): - r"""Holds an enum value. - - Attributes: - display_name (str): - The display name of the enum value. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=2, - oneof='kind', - ) - string_value: str = proto.Field( - proto.STRING, - number=3, - oneof='kind', - ) - bool_value: bool = proto.Field( - proto.BOOL, - number=4, - oneof='kind', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - oneof='kind', - message=timestamp_pb2.Timestamp, - ) - enum_value: EnumValue = proto.Field( - proto.MESSAGE, - number=6, - oneof='kind', - message=EnumValue, - ) - order: int = proto.Field( - proto.INT32, - number=7, - ) - - -class TagTemplate(proto.Message): - r"""A tag template defines a tag, which can have one or more typed - fields. The template is used to create and attach the tag to Google - Cloud resources. `Tag template - roles `__ - provide permissions to create, edit, and use the template. See, for - example, the `TagTemplate - User `__ - role, which includes permission to use the tag template to tag - resources. - - Attributes: - name (str): - The resource name of the tag template in URL format. - Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} - - Note that this TagTemplate and its child resources may not - actually be stored in the location in this name. - display_name (str): - The display name for this template. Defaults - to an empty string. - fields (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.TagTemplateField]): - Required. Map of tag template field IDs to the settings for - the field. This map is an exhaustive list of the allowed - fields. This map must contain at least one field and at most - 500 fields. - - The keys to this map are tag template field IDs. Field IDs - can contain letters (both uppercase and lowercase), numbers - (0-9) and underscores (_). Field IDs must be at least 1 - character long and at most 64 characters long. Field IDs - must start with a letter or underscore. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - fields: MutableMapping[str, 'TagTemplateField'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=3, - message='TagTemplateField', - ) - - -class TagTemplateField(proto.Message): - r"""The template for an individual field within a tag template. - - Attributes: - name (str): - Output only. The resource name of the tag template field in - URL format. 
Example: - - - projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} - - Note that this TagTemplateField may not actually be stored - in the location in this name. - display_name (str): - The display name for this field. Defaults to - an empty string. - type_ (google.cloud.datacatalog_v1beta1.types.FieldType): - Required. The type of value this tag field - can contain. - is_required (bool): - Whether this is a required field. Defaults to - false. - description (str): - The description for this field. Defaults to - an empty string. - order (int): - The order of this field with respect to other - fields in this tag template. A higher value - indicates a more important field. The value can - be negative. Multiple fields can have the same - order, and field orders within a tag do not have - to be sequential. - """ - - name: str = proto.Field( - proto.STRING, - number=6, - ) - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'FieldType' = proto.Field( - proto.MESSAGE, - number=2, - message='FieldType', - ) - is_required: bool = proto.Field( - proto.BOOL, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - order: int = proto.Field( - proto.INT32, - number=5, - ) - - -class FieldType(proto.Message): - r""" - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - primitive_type (google.cloud.datacatalog_v1beta1.types.FieldType.PrimitiveType): - Represents primitive types - string, bool - etc. - - This field is a member of `oneof`_ ``type_decl``. - enum_type (google.cloud.datacatalog_v1beta1.types.FieldType.EnumType): - Represents an enum type. - - This field is a member of `oneof`_ ``type_decl``. - """ - class PrimitiveType(proto.Enum): - r""" - - Values: - PRIMITIVE_TYPE_UNSPECIFIED (0): - This is the default invalid value for a type. - DOUBLE (1): - A double precision number. - STRING (2): - An UTF-8 string. - BOOL (3): - A boolean value. - TIMESTAMP (4): - A timestamp. - """ - PRIMITIVE_TYPE_UNSPECIFIED = 0 - DOUBLE = 1 - STRING = 2 - BOOL = 3 - TIMESTAMP = 4 - - class EnumType(proto.Message): - r""" - - Attributes: - allowed_values (MutableSequence[google.cloud.datacatalog_v1beta1.types.FieldType.EnumType.EnumValue]): - - """ - - class EnumValue(proto.Message): - r""" - - Attributes: - display_name (str): - Required. The display name of the enum value. - Must not be an empty string. 
- """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - - allowed_values: MutableSequence['FieldType.EnumType.EnumValue'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldType.EnumType.EnumValue', - ) - - primitive_type: PrimitiveType = proto.Field( - proto.ENUM, - number=1, - oneof='type_decl', - enum=PrimitiveType, - ) - enum_type: EnumType = proto.Field( - proto.MESSAGE, - number=2, - oneof='type_decl', - message=EnumType, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py deleted file mode 100644 index ec7f95ba634e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/timestamps.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'SystemTimestamps', - }, -) - - -class SystemTimestamps(proto.Message): - r"""Timestamps about this resource according to a particular - system. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - The creation time of the resource within the - given system. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The last-modified time of the resource within - the given system. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The expiration time of the - resource within the given system. Currently only - apllicable to BigQuery resources. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py deleted file mode 100644 index 02d3fa0c92e7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/google/cloud/datacatalog_v1beta1/types/usage.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
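The tag messages removed above (``Tag``, ``TagField``, ``TagTemplate``, ``TagTemplateField``, ``FieldType``) likewise mirror the published v1beta1 surface. A minimal sketch of building a template and a tag locally with proto-plus follows, assuming ``google-cloud-datacatalog`` is installed; the display names, field IDs, and resource names are placeholders only.

# Illustrative sketch only; all names and IDs below are placeholders.
from google.cloud import datacatalog_v1beta1

# A TagTemplate maps field IDs to typed TagTemplateField definitions.
template = datacatalog_v1beta1.TagTemplate(
    display_name="Demo template",
    fields={
        "owner": datacatalog_v1beta1.TagTemplateField(
            display_name="Owner",
            type_=datacatalog_v1beta1.FieldType(
                primitive_type=datacatalog_v1beta1.FieldType.PrimitiveType.STRING
            ),
            is_required=True,
        ),
    },
)

# A Tag supplies a value for each field it sets; TagField is a oneof over
# double/string/bool/timestamp/enum values.
tag = datacatalog_v1beta1.Tag(
    template="projects/my-project/locations/us-central1/tagTemplates/demo_template",
    fields={
        "owner": datacatalog_v1beta1.TagField(string_value="data-team@example.com"),
    },
)
print(tag.fields["owner"].string_value)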
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.datacatalog.v1beta1', - manifest={ - 'UsageStats', - 'UsageSignal', - }, -) - - -class UsageStats(proto.Message): - r"""Detailed counts on the entry's usage. - Caveats: - - - Only BigQuery tables have usage stats - - The usage stats only include BigQuery query jobs - - The usage stats might be underestimated, e.g. wildcard table - references are not yet counted in usage computation - https://cloud.google.com/bigquery/docs/querying-wildcard-tables - - Attributes: - total_completions (float): - The number of times that the underlying entry - was successfully used. - total_failures (float): - The number of times that the underlying entry - was attempted to be used but failed. - total_cancellations (float): - The number of times that the underlying entry - was attempted to be used but was cancelled by - the user. - total_execution_time_for_completions_millis (float): - Total time spent (in milliseconds) during - uses the resulted in completions. - """ - - total_completions: float = proto.Field( - proto.FLOAT, - number=1, - ) - total_failures: float = proto.Field( - proto.FLOAT, - number=2, - ) - total_cancellations: float = proto.Field( - proto.FLOAT, - number=3, - ) - total_execution_time_for_completions_millis: float = proto.Field( - proto.FLOAT, - number=4, - ) - - -class UsageSignal(proto.Message): - r"""The set of all usage signals that we store in Data Catalog. - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp of the end of the usage - statistics duration. - usage_within_time_range (MutableMapping[str, google.cloud.datacatalog_v1beta1.types.UsageStats]): - Usage statistics over each of the pre-defined - time ranges, supported strings for time ranges - are {"24H", "7D", "30D"}. 
- """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - usage_within_time_range: MutableMapping[str, 'UsageStats'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message='UsageStats', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py deleted file mode 100644 index c553519e1586..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/datacatalog_v1beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
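The usage types removed above (``UsageStats`` and ``UsageSignal``, keyed by the documented "24H"/"7D"/"30D" time ranges) can be constructed the same way. A minimal sketch, assuming ``google-cloud-datacatalog`` is installed; the counts and timestamp below are placeholder values.

# Illustrative sketch only; counts and timestamp are placeholders.
from google.cloud import datacatalog_v1beta1
from google.protobuf import timestamp_pb2

# Placeholder usage counts for a single entry.
stats = datacatalog_v1beta1.UsageStats(
    total_completions=42.0,
    total_failures=3.0,
    total_cancellations=1.0,
    total_execution_time_for_completions_millis=125000.0,
)

# Usage signals map a pre-defined time range (e.g. "24H") to its UsageStats.
signal = datacatalog_v1beta1.UsageSignal(
    update_time=timestamp_pb2.Timestamp(seconds=1695600000),
    usage_within_time_range={"24H": stats},
)
print(signal.usage_within_time_range["24H"].total_completions)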
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py deleted file mode 100644 index 2a4139403363..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = await client.create_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py deleted file mode 100644 index 8e52263e6986..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = await client.create_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py deleted file mode 100644 index 0fb557a1b7a4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateEntryGroupRequest( - parent="parent_value", - entry_group_id="entry_group_id_value", - ) - - # Make the request - response = client.create_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py deleted file mode 100644 index b114478ce776..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.CreateEntryRequest( - parent="parent_value", - entry_id="entry_id_value", - entry=entry, - ) - - # Make the request - response = client.create_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py deleted file mode 100644 index ec5863396bc1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = await client.create_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py deleted file mode 100644 index f8821f5cf699..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.CreateTagRequest( - parent="parent_value", - tag=tag, - ) - - # Make the request - response = client.create_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py deleted file mode 100644 index ae5841be4d02..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = await client.create_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py deleted file mode 100644 index 3deeeb4b04b7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.create_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py deleted file mode 100644 index ed7203812ca1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.CreateTagTemplateFieldRequest( - parent="parent_value", - tag_template_field_id="tag_template_field_id_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.create_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py deleted file mode 100644 index 0b1feb38d688..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTagTemplateRequest( - parent="parent_value", - tag_template_id="tag_template_id_value", - ) - - # Make the request - response = client.create_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py deleted file mode 100644 index 93f72a4d05b0..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py deleted file mode 100644 index 100f8f206e37..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - await client.delete_entry_group(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py deleted file mode 100644 index b4e7093432a9..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryGroupRequest( - name="name_value", - ) - - # Make the request - client.delete_entry_group(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py deleted file mode 100644 index 03f85f447eb1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteEntryRequest( - name="name_value", - ) - - # Make the request - client.delete_entry(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py deleted file mode 100644 index f8e048c17be4..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_tag(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py deleted file mode 100644 index 80674f7f2eed..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagRequest( - name="name_value", - ) - - # Make the request - client.delete_tag(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py deleted file mode 100644 index 9201de2cd1e6..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py deleted file mode 100644 index 1229caa7a4ab..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - await client.delete_tag_template_field(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py deleted file mode 100644 index dd8c0e840d8b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateFieldRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template_field(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py deleted file mode 100644 index 330e9bed8b36..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTagTemplateRequest( - name="name_value", - force=True, - ) - - # Make the request - client.delete_tag_template(request=request) - - -# [END datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py deleted file mode 100644 index a82d9be4ace5..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_get_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py deleted file mode 100644 index 56282040696a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_get_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py deleted file mode 100644 index 2da6dca76f65..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_get_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py deleted file mode 100644 index 2944e6e6f9b1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_get_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetEntryRequest( - name="name_value", - ) - - # Make the request - response = client.get_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py deleted file mode 100644 index e1c18a42ce5a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py deleted file mode 100644 index b3d71872a427..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py deleted file mode 100644 index 8a857df051ec..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_get_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py deleted file mode 100644 index 37d7a439c87c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_get_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTagTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py deleted file mode 100644 index a7c314f69bf6..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListEntries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_list_entries(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListEntries_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py deleted file mode 100644 index a28b3ca13b33..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_list_entries(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py deleted file mode 100644 index c8fefd5d8830..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py deleted file mode 100644 index 19afe7a10caf..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListEntryGroups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_list_entry_groups(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListEntryGroupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_entry_groups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py deleted file mode 100644 index f2dac6f015e2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListTags_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_list_tags(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py deleted file mode 100644 index daf3a9a17e71..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_ListTags_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_list_tags(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_ListTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py deleted file mode 100644 index 83248ad5b528..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_lookup_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = await client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py deleted file mode 100644 index 6094d0b5c00b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LookupEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_lookup_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.LookupEntryRequest( - linked_resource="linked_resource_value", - ) - - # Make the request - response = client.lookup_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py deleted file mode 100644 index 33ed2dbfc39e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = await client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py deleted file mode 100644 index 5452195f0b12..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateFieldEnumValue -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = await client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py deleted file mode 100644 index 8cca3156d261..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateFieldEnumValue -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_rename_tag_template_field_enum_value(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldEnumValueRequest( - name="name_value", - new_enum_value_display_name="new_enum_value_display_name_value", - ) - - # Make the request - response = client.rename_tag_template_field_enum_value(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py deleted file mode 100644 index 1d6487816a20..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_rename_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.RenameTagTemplateFieldRequest( - name="name_value", - new_tag_template_field_id="new_tag_template_field_id_value", - ) - - # Make the request - response = client.rename_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py deleted file mode 100644 index 85cbeea3781b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_search_catalog(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py deleted file mode 100644 index a9ddb9429768..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_search_catalog(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.SearchCatalogRequest( - ) - - # Make the request - page_result = client.search_catalog(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py deleted file mode 100644 index dc05b9c411fd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py deleted file mode 100644 index 400a3b82736c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py deleted file mode 100644 index f654277fe7fa..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py deleted file mode 100644 index 2f7dc90069ce..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py deleted file mode 100644 index 0c57c832761c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = await client.update_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py deleted file mode 100644 index 804fffb64e4e..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateEntryGroupRequest( - ) - - # Make the request - response = await client.update_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py deleted file mode 100644 index 5f58410c2a82..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntryGroup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_entry_group(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateEntryGroupRequest( - ) - - # Make the request - response = client.update_entry_group(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py deleted file mode 100644 index 45e5e55b4313..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateEntry -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_entry(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - entry = datacatalog_v1beta1.Entry() - entry.type_ = "FILESET" - entry.integrated_system = "CLOUD_PUBSUB" - entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] - - request = datacatalog_v1beta1.UpdateEntryRequest( - entry=entry, - ) - - # Make the request - response = client.update_entry(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py deleted file mode 100644 index 4c1dde679c99..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = await client.update_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py deleted file mode 100644 index b432ab7b927f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_tag(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag = datacatalog_v1beta1.Tag() - tag.column = "column_value" - tag.template = "template_value" - - request = datacatalog_v1beta1.UpdateTagRequest( - tag=tag, - ) - - # Make the request - response = client.update_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py deleted file mode 100644 index 8a5e268974cb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTagTemplateRequest( - ) - - # Make the request - response = await client.update_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py deleted file mode 100644 index 62489bdf4b66..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogAsyncClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = await client.update_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py deleted file mode 100644 index 7f2c8236d3e2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplateField -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_tag_template_field(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - tag_template_field = datacatalog_v1beta1.TagTemplateField() - tag_template_field.type_.primitive_type = "TIMESTAMP" - - request = datacatalog_v1beta1.UpdateTagTemplateFieldRequest( - name="name_value", - tag_template_field=tag_template_field, - ) - - # Make the request - response = client.update_tag_template_field(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py deleted file mode 100644 index 0874945ed578..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTagTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_tag_template(): - # Create a client - client = datacatalog_v1beta1.DataCatalogClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTagTemplateRequest( - ) - - # Make the request - response = client.update_tag_template(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py deleted file mode 100644 index a81b726d9e9a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py deleted file mode 100644 index 71a62addce00..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreatePolicyTagRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py deleted file mode 100644 index 27efa9b68ab7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py deleted file mode 100644 index 1290fd16f3b3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_create_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.CreateTaxonomyRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py deleted file mode 100644 index 3c5a5696eee1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - await client.delete_policy_tag(request=request) - - -# [END datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py deleted file mode 100644 index 4041a12fc129..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeletePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeletePolicyTagRequest( - name="name_value", - ) - - # Make the request - client.delete_policy_tag(request=request) - - -# [END datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py deleted file mode 100644 index e5b78b19a424..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - await client.delete_taxonomy(request=request) - - -# [END datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py deleted file mode 100644 index 9885d9ca0591..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_delete_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.DeleteTaxonomyRequest( - name="name_value", - ) - - # Make the request - client.delete_taxonomy(request=request) - - -# [END datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py deleted file mode 100644 index 9641a9c267b2..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py deleted file mode 100644 index 9f3007cbc08f..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py deleted file mode 100644 index 2cb680ced5f8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = await client.get_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py deleted file mode 100644 index 7b0434b6b748..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetPolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_get_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetPolicyTagRequest( - name="name_value", - ) - - # Make the request - response = client.get_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py deleted file mode 100644 index 3a98f1d61b29..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py deleted file mode 100644 index a2a1fd2f41c3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_get_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.GetTaxonomyRequest( - name="name_value", - ) - - # Make the request - response = client.get_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py deleted file mode 100644 index 3973328cb7ea..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPolicyTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py deleted file mode 100644 index 925b44bb886b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListPolicyTags -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_list_policy_tags(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListPolicyTagsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_policy_tags(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py deleted file mode 100644 index 44744709b9e1..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py deleted file mode 100644 index 5dd8a071a4bb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_list_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ListTaxonomiesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_taxonomies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py deleted file mode 100644 index a8f5623b7c2c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = await client.export_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py deleted file mode 100644 index f609cb7fa09c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_export_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.ExportTaxonomiesRequest( - serialized_taxonomies=True, - parent="parent_value", - taxonomies=['taxonomies_value1', 'taxonomies_value2'], - ) - - # Make the request - response = client.export_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py deleted file mode 100644 index b22aefd13e34..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1beta1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1beta1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = await client.import_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py deleted file mode 100644 index 78e7a29f3cdd..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportTaxonomies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_import_taxonomies(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerSerializationClient() - - # Initialize request argument(s) - inline_source = datacatalog_v1beta1.InlineSource() - inline_source.taxonomies.display_name = "display_name_value" - - request = datacatalog_v1beta1.ImportTaxonomiesRequest( - inline_source=inline_source, - parent="parent_value", - ) - - # Make the request - response = client.import_taxonomies(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py deleted file mode 100644 index e1205653eb19..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py deleted file mode 100644 index e053df5959a8..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py deleted file mode 100644 index 84a92fa19f8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py deleted file mode 100644 index 6ee09e00ad9b..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py deleted file mode 100644 index 039ffcce1172..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdatePolicyTagRequest( - ) - - # Make the request - response = await client.update_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py deleted file mode 100644 index 88a1346aaf1a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdatePolicyTag -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_policy_tag(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdatePolicyTagRequest( - ) - - # Make the request - response = client.update_policy_tag(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py deleted file mode 100644 index f027cd290376..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -async def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerAsyncClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTaxonomyRequest( - ) - - # Make the request - response = await client.update_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py deleted file mode 100644 index 514f310b730c..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTaxonomy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-datacatalog - - -# [START datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import datacatalog_v1beta1 - - -def sample_update_taxonomy(): - # Create a client - client = datacatalog_v1beta1.PolicyTagManagerClient() - - # Initialize request argument(s) - request = datacatalog_v1beta1.UpdateTaxonomyRequest( - ) - - # Make the request - response = client.update_taxonomy(request=request) - - # Handle the response - print(response) - -# [END datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync] diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json deleted file mode 100644 index 14e0e75feb01..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ /dev/null @@ -1,7024 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.datacatalog.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-datacatalog", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_entry_group", - "method": { - "fullName": 
"google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_group_id", - "type": "str" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "create_entry_group" - }, - "description": "Sample for CreateEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntryGroup_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1beta1.types.Entry" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntry_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", 
- "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateEntryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "entry_id", - "type": "str" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1beta1.types.Entry" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "create_entry" - }, - "description": "Sample for CreateEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateEntry_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_field_id", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "create_tag_template_field" - }, - "description": "Sample for CreateTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateFieldRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_field_id", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "create_tag_template_field" - }, - "description": "Sample for CreateTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplateField_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_id", - "type": "str" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "create_tag_template" - }, - "description": "Sample for CreateTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_async", - "segments": [ - { - "end": 52, - "start": 27, 
- "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag_template_id", - "type": "str" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "create_tag_template" - }, - "description": "Sample for CreateTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTagTemplate_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.create_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1beta1.types.Tag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", - "shortName": "create_tag" - }, - "description": "Sample for CreateTag", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"datacatalog_v1beta1_generated_DataCatalog_CreateTag_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.create_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "CreateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1beta1.types.Tag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", - "shortName": "create_tag" - }, - "description": "Sample for CreateTag", - "file": "datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_CreateTag_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_create_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 
49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry_group" - }, - "description": "Sample for DeleteEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntryGroup_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datacatalog_v1beta1_generated_data_catalog_delete_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_entry" - }, - "description": "Sample for DeleteEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteEntry_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template_field" - }, - "description": "Sample for DeleteTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": 
"DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template_field" - }, - "description": "Sample for DeleteTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplateField_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template" - }, - "description": "Sample for DeleteTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag_template", 
- "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "force", - "type": "bool" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag_template" - }, - "description": "Sample for DeleteTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTagTemplate_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.delete_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag" - }, - "description": "Sample for DeleteTag", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTag_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.delete_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "DeleteTag" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.datacatalog_v1beta1.types.DeleteTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_tag" - }, - "description": "Sample for DeleteTag", - "file": "datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_DeleteTag_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_delete_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "read_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetEntryGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "read_mask", - "type": 
"google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "get_entry_group" - }, - "description": "Sample for GetEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntryGroup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetEntryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "get_entry" - }, - "description": "Sample for GetEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": 
"datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.get_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "get_tag_template" - }, - "description": "Sample for GetTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.get_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "GetTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetTagTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "get_tag_template" - }, - "description": "Sample for GetTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_GetTagTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_get_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesAsyncPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "datacatalog_v1beta1_generated_data_catalog_list_entries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_entries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_entries", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntries", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListEntriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntriesPager", - "shortName": "list_entries" - }, - "description": "Sample for ListEntries", - "file": "datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntries_sync", - 
"segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_entries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_entry_groups", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsAsyncPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_entry_groups", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListEntryGroups", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListEntryGroups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListEntryGroupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListEntryGroupsPager", - "shortName": "list_entry_groups" - }, - "description": "Sample for ListEntryGroups", - "file": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListEntryGroups_sync", - "segments": [ - { - 
"end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_entry_groups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.list_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsAsyncPager", - "shortName": "list_tags" - }, - "description": "Sample for ListTags", - "file": "datacatalog_v1beta1_generated_data_catalog_list_tags_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListTags_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_tags_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.list_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "ListTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.ListTagsPager", - "shortName": "list_tags" - }, - "description": "Sample for ListTags", - "file": "datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_ListTags_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_list_tags_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.lookup_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_LookupEntry_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.lookup_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "LookupEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.LookupEntryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "lookup_entry" - }, - "description": "Sample for LookupEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_LookupEntry_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datacatalog_v1beta1_generated_data_catalog_lookup_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.rename_tag_template_field_enum_value", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateFieldEnumValue", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateFieldEnumValue" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_enum_value_display_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "rename_tag_template_field_enum_value" - }, - "description": "Sample for RenameTagTemplateFieldEnumValue", - "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.rename_tag_template_field_enum_value", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateFieldEnumValue", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateFieldEnumValue" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldEnumValueRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_enum_value_display_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "rename_tag_template_field_enum_value" - }, - "description": "Sample for RenameTagTemplateFieldEnumValue", - "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateFieldEnumValue_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - 
"end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_enum_value_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.rename_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_tag_template_field_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "rename_tag_template_field" - }, - "description": "Sample for RenameTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.rename_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "RenameTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.RenameTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_tag_template_field_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "rename_tag_template_field" - }, - "description": "Sample for RenameTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_RenameTagTemplateField_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_rename_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.search_catalog", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SearchCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest" - }, - { - "name": "scope", - "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogAsyncPager", - "shortName": "search_catalog" - }, - "description": "Sample for SearchCatalog", - "file": "datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_search_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.search_catalog", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SearchCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest" - }, - { - "name": "scope", - "type": "google.cloud.datacatalog_v1beta1.types.SearchCatalogRequest.Scope" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.data_catalog.pagers.SearchCatalogPager", - "shortName": 
"search_catalog" - }, - "description": "Sample for SearchCatalog", - "file": "datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SearchCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_search_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 
27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_entry_group", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntryGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryGroupRequest" - }, - { - "name": "entry_group", - "type": "google.cloud.datacatalog_v1beta1.types.EntryGroup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.EntryGroup", - "shortName": "update_entry_group" - }, - "description": "Sample for UpdateEntryGroup", - "file": 
"datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntryGroup_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1beta1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_entry", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateEntry" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateEntryRequest" - }, - { - "name": "entry", - "type": "google.cloud.datacatalog_v1beta1.types.Entry" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.datacatalog_v1beta1.types.Entry", - "shortName": "update_entry" - }, - "description": "Sample for UpdateEntry", - "file": "datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateEntry_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_entry_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "tag_template_field", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "update_tag_template_field" - }, - "description": "Sample for UpdateTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag_template_field", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplateField" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateFieldRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"tag_template_field", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplateField" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplateField", - "shortName": "update_tag_template_field" - }, - "description": "Sample for UpdateTagTemplateField", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplateField_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_field_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "update_tag_template" - }, - "description": "Sample for UpdateTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag_template", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", - "service": { - 
"fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTagTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagTemplateRequest" - }, - { - "name": "tag_template", - "type": "google.cloud.datacatalog_v1beta1.types.TagTemplate" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.TagTemplate", - "shortName": "update_tag_template" - }, - "description": "Sample for UpdateTagTemplate", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTagTemplate_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient", - "shortName": "DataCatalogAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogAsyncClient.update_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagRequest" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1beta1.types.Tag" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", - "shortName": "update_tag" - }, - "description": "Sample for UpdateTag", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTag_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.DataCatalogClient", - "shortName": "DataCatalogClient" - }, - "fullName": 
"google.cloud.datacatalog_v1beta1.DataCatalogClient.update_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.DataCatalog", - "shortName": "DataCatalog" - }, - "shortName": "UpdateTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTagRequest" - }, - { - "name": "tag", - "type": "google.cloud.datacatalog_v1beta1.types.Tag" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Tag", - "shortName": "update_tag" - }, - "description": "Sample for UpdateTag", - "file": "datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_DataCatalog_UpdateTag_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_data_catalog_update_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient", - "shortName": "PolicyTagManagerSerializationAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient.export_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ExportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse", - "shortName": "export_taxonomies" - }, - "description": "Sample for ExportTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient", - "shortName": "PolicyTagManagerSerializationClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient.export_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ExportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ExportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.ExportTaxonomiesResponse", - "shortName": "export_taxonomies" - }, - "description": "Sample for ExportTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ExportTaxonomies_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_export_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient", - "shortName": "PolicyTagManagerSerializationAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationAsyncClient.import_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ImportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse", - "shortName": "import_taxonomies" - }, - "description": "Sample for ImportTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" 
- } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient", - "shortName": "PolicyTagManagerSerializationClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerSerializationClient.import_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization.ImportTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManagerSerialization", - "shortName": "PolicyTagManagerSerialization" - }, - "shortName": "ImportTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.ImportTaxonomiesResponse", - "shortName": "import_taxonomies" - }, - "description": "Sample for ImportTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManagerSerialization_ImportTaxonomies_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_serialization_import_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.create_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "create_policy_tag" - }, - "description": "Sample for CreatePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.create_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreatePolicyTagRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "create_policy_tag" - }, - "description": "Sample for CreatePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreatePolicyTag_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.create_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "create_taxonomy" - }, - "description": "Sample for CreateTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_async", - "segments": [ - 
{ - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.create_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.CreateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "CreateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.CreateTaxonomyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "create_taxonomy" - }, - "description": "Sample for CreateTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_CreateTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_create_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.delete_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeletePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_policy_tag" - }, - "description": "Sample for DeletePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_async", - 
"segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.delete_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeletePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeletePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeletePolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_policy_tag" - }, - "description": "Sample for DeletePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeletePolicyTag_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.delete_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeleteTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_taxonomy" - }, - "description": "Sample for DeleteTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - 
"end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.delete_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.DeleteTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "DeleteTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.DeleteTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_taxonomy" - }, - "description": "Sample for DeleteTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_DeleteTaxonomy_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_delete_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetPolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "get_policy_tag" - }, - "description": "Sample for GetPolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { 
- "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetPolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetPolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetPolicyTagRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "get_policy_tag" - }, - "description": "Sample for GetPolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetPolicyTag_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.get_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "get_taxonomy" - }, - "description": "Sample for GetTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.get_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.GetTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "GetTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.GetTaxonomyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "get_taxonomy" - }, - "description": "Sample for GetTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_GetTaxonomy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_get_taxonomy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.list_policy_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListPolicyTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsAsyncPager", - "shortName": "list_policy_tags" - }, - "description": "Sample for ListPolicyTags", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.list_policy_tags", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListPolicyTags", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListPolicyTags" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListPolicyTagsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListPolicyTagsPager", - "shortName": "list_policy_tags" - }, - "description": "Sample for ListPolicyTags", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListPolicyTags_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_policy_tags_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.list_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesAsyncPager", - "shortName": "list_taxonomies" - }, - "description": "Sample for ListTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.list_taxonomies", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.ListTaxonomies", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "ListTaxonomies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.ListTaxonomiesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.pagers.ListTaxonomiesPager", - "shortName": "list_taxonomies" - }, - "description": "Sample for ListTaxonomies", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_ListTaxonomies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_list_taxonomies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.set_iam_policy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.SetIamPolicy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": 
"google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.TestIamPermissions", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.update_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "update_policy_tag" - }, - "description": "Sample for UpdatePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": 
"PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.update_policy_tag", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdatePolicyTag", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdatePolicyTag" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdatePolicyTagRequest" - }, - { - "name": "policy_tag", - "type": "google.cloud.datacatalog_v1beta1.types.PolicyTag" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.PolicyTag", - "shortName": "update_policy_tag" - }, - "description": "Sample for UpdatePolicyTag", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdatePolicyTag_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_policy_tag_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient", - "shortName": "PolicyTagManagerAsyncClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerAsyncClient.update_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "update_taxonomy" - }, - "description": "Sample for UpdateTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - 
"fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient", - "shortName": "PolicyTagManagerClient" - }, - "fullName": "google.cloud.datacatalog_v1beta1.PolicyTagManagerClient.update_taxonomy", - "method": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager.UpdateTaxonomy", - "service": { - "fullName": "google.cloud.datacatalog.v1beta1.PolicyTagManager", - "shortName": "PolicyTagManager" - }, - "shortName": "UpdateTaxonomy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.datacatalog_v1beta1.types.UpdateTaxonomyRequest" - }, - { - "name": "taxonomy", - "type": "google.cloud.datacatalog_v1beta1.types.Taxonomy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.datacatalog_v1beta1.types.Taxonomy", - "shortName": "update_taxonomy" - }, - "description": "Sample for UpdateTaxonomy", - "file": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "datacatalog_v1beta1_generated_PolicyTagManager_UpdateTaxonomy_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "datacatalog_v1beta1_generated_policy_tag_manager_update_taxonomy_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py deleted file mode 100644 index a490f699057a..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/scripts/fixup_datacatalog_v1beta1_keywords.py +++ /dev/null @@ -1,215 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class datacatalogCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_entry': ('parent', 'entry_id', 'entry', ), - 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ), - 'create_policy_tag': ('parent', 'policy_tag', ), - 'create_tag': ('parent', 'tag', ), - 'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ), - 'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ), - 'create_taxonomy': ('parent', 'taxonomy', ), - 'delete_entry': ('name', ), - 'delete_entry_group': ('name', 'force', ), - 'delete_policy_tag': ('name', ), - 'delete_tag': ('name', ), - 'delete_tag_template': ('name', 'force', ), - 'delete_tag_template_field': ('name', 'force', ), - 'delete_taxonomy': ('name', ), - 'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ), - 'get_entry': ('name', ), - 'get_entry_group': ('name', 'read_mask', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_policy_tag': ('name', ), - 'get_tag_template': ('name', ), - 'get_taxonomy': ('name', ), - 'import_taxonomies': ('parent', 'inline_source', ), - 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), - 'list_entry_groups': ('parent', 'page_size', 'page_token', ), - 'list_policy_tags': ('parent', 'page_size', 'page_token', ), - 'list_tags': ('parent', 'page_size', 'page_token', ), - 'list_taxonomies': ('parent', 'page_size', 'page_token', 'filter', ), - 'lookup_entry': ('linked_resource', 'sql_resource', ), - 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), - 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), - 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_entry': ('entry', 'update_mask', ), - 'update_entry_group': ('entry_group', 'update_mask', ), - 'update_policy_tag': ('policy_tag', 'update_mask', ), - 'update_tag': ('tag', 'update_mask', ), - 'update_tag_template': ('tag_template', 'update_mask', ), - 'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ), - 'update_taxonomy': ('taxonomy', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=datacatalogCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the datacatalog client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py deleted file mode 100644 index 1d59854d3bcb..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-datacatalog' - - -description = "Google Cloud Datacatalog API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/datacatalog/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-datacatalog" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf99e0be..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py deleted file mode 100644 index 1b4db446eb8d..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py deleted file mode 100644 index 5da43c8578c3..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ /dev/null @@ -1,8709 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogAsyncClient -from google.cloud.datacatalog_v1beta1.services.data_catalog import DataCatalogClient -from google.cloud.datacatalog_v1beta1.services.data_catalog import pagers -from google.cloud.datacatalog_v1beta1.services.data_catalog import transports -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import datacatalog -from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec -from google.cloud.datacatalog_v1beta1.types import schema -from google.cloud.datacatalog_v1beta1.types import search -from google.cloud.datacatalog_v1beta1.types import table_spec -from google.cloud.datacatalog_v1beta1.types import tags -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.cloud.datacatalog_v1beta1.types import usage -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataCatalogClient._get_default_mtls_endpoint(None) is None - assert DataCatalogClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataCatalogClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataCatalogClient, "grpc"), - (DataCatalogAsyncClient, "grpc_asyncio"), -]) -def test_data_catalog_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataCatalogGrpcTransport, "grpc"), - (transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_catalog_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataCatalogClient, "grpc"), - (DataCatalogAsyncClient, "grpc_asyncio"), -]) -def test_data_catalog_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_data_catalog_client_get_transport_class(): - transport = DataCatalogClient.get_transport_class() - available_transports = [ - transports.DataCatalogGrpcTransport, - ] - assert transport in 
available_transports - - transport = DataCatalogClient.get_transport_class("grpc") - assert transport == transports.DataCatalogGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -def test_data_catalog_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataCatalogClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_catalog_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. 
Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataCatalogClient, DataCatalogAsyncClient -]) -@mock.patch.object(DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)) -@mock.patch.object(DataCatalogAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogAsyncClient)) -def test_data_catalog_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc"), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_data_catalog_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_catalog_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_catalog_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataCatalogClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", grpc_helpers), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_catalog_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.SearchCatalogRequest, - dict, -]) -def test_search_catalog(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.search_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_search_catalog_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - client.search_catalog() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - -@pytest.mark.asyncio -async def test_search_catalog_async(transport: str = 'grpc_asyncio', request_type=datacatalog.SearchCatalogRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse( - total_size=1086, - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.search_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.SearchCatalogRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchCatalogAsyncPager) - assert response.total_size == 1086 - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_search_catalog_async_from_dict(): - await test_search_catalog_async(request_type=dict) - - -def test_search_catalog_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_catalog( - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_search_catalog_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_catalog( - datacatalog.SearchCatalogRequest(), - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - -@pytest.mark.asyncio -async def test_search_catalog_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.SearchCatalogResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.SearchCatalogResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_catalog( - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']) - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_search_catalog_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.search_catalog( - datacatalog.SearchCatalogRequest(), - scope=datacatalog.SearchCatalogRequest.Scope(include_org_ids=['include_org_ids_value']), - query='query_value', - ) - - -def test_search_catalog_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - - metadata = () - pager = client.search_catalog(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, search.SearchCatalogResult) - for i in results) -def test_search_catalog_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_catalog(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_catalog_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_catalog(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, search.SearchCatalogResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_catalog_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_catalog), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - next_page_token='abc', - ), - datacatalog.SearchCatalogResponse( - results=[], - next_page_token='def', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - ], - next_page_token='ghi', - ), - datacatalog.SearchCatalogResponse( - results=[ - search.SearchCatalogResult(), - search.SearchCatalogResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_catalog(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateEntryGroupRequest, - dict, -]) -def test_create_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - client.create_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - -@pytest.mark.asyncio -async def test_create_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_entry_group_async_from_dict(): - await test_create_entry_group_async(request_type=dict) - - -def test_create_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.CreateEntryGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.create_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_entry_group( - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_group_id - mock_val = 'entry_group_id_value' - assert arg == mock_val - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - - -def test_create_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry_group( - datacatalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry_group( - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_group_id - mock_val = 'entry_group_id_value' - assert arg == mock_val - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_entry_group( - datacatalog.CreateEntryGroupRequest(), - parent='parent_value', - entry_group_id='entry_group_id_value', - entry_group=datacatalog.EntryGroup(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateEntryGroupRequest, - dict, -]) -def test_update_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - client.update_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - -@pytest.mark.asyncio -async def test_update_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_entry_group_async_from_dict(): - await test_update_entry_group_async(request_type=dict) - - -def test_update_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryGroupRequest() - - request.entry_group.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.update_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry_group.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry_group( - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry_group( - datacatalog.UpdateEntryGroupRequest(), - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_entry_group( - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry_group - mock_val = datacatalog.EntryGroup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry_group( - datacatalog.UpdateEntryGroupRequest(), - entry_group=datacatalog.EntryGroup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetEntryGroupRequest, - dict, -]) -def test_get_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - client.get_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - -@pytest.mark.asyncio -async def test_get_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.EntryGroup) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_entry_group_async_from_dict(): - await test_get_entry_group_async(request_type=dict) - - -def test_get_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = datacatalog.EntryGroup() - client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - await client.get_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_entry_group( - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].read_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_get_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_entry_group( - datacatalog.GetEntryGroupRequest(), - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_get_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.EntryGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.EntryGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_entry_group( - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].read_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_entry_group( - datacatalog.GetEntryGroupRequest(), - name='name_value', - read_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteEntryGroupRequest, - dict, -]) -def test_delete_entry_group(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_entry_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - client.delete_entry_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - -@pytest.mark.asyncio -async def test_delete_entry_group_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryGroupRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryGroupRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_entry_group_async_from_dict(): - await test_delete_entry_group_async(request_type=dict) - - -def test_delete_entry_group_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = None - client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_group_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_group_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_group_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry_group( - datacatalog.DeleteEntryGroupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_group_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_group_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry_group( - datacatalog.DeleteEntryGroupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ListEntryGroupsRequest, - dict, -]) -def test_list_entry_groups(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_entry_groups_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - client.list_entry_groups() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - -@pytest.mark.asyncio -async def test_list_entry_groups_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntryGroupsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntryGroupsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryGroupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_from_dict(): - await test_list_entry_groups_async(request_type=dict) - - -def test_list_entry_groups_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntryGroupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = datacatalog.ListEntryGroupsResponse() - client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entry_groups_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntryGroupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) - await client.list_entry_groups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entry_groups_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entry_groups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entry_groups_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entry_groups( - datacatalog.ListEntryGroupsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entry_groups_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntryGroupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntryGroupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entry_groups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entry_groups_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entry_groups( - datacatalog.ListEntryGroupsRequest(), - parent='parent_value', - ) - - -def test_list_entry_groups_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entry_groups(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datacatalog.EntryGroup) - for i in results) -def test_list_entry_groups_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entry_groups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entry_groups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datacatalog.EntryGroup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entry_groups_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - next_page_token='abc', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token='def', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntryGroupsResponse( - entry_groups=[ - datacatalog.EntryGroup(), - datacatalog.EntryGroup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entry_groups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateEntryRequest, - dict, -]) -def test_create_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - client.create_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - -@pytest.mark.asyncio -async def test_create_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_entry_async_from_dict(): - await test_create_entry_async(request_type=dict) - - -def test_create_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateEntryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.create_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_entry( - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_id - mock_val = 'entry_id_value' - assert arg == mock_val - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - - -def test_create_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_entry( - datacatalog.CreateEntryRequest(), - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_entry( - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].entry_id - mock_val = 'entry_id_value' - assert arg == mock_val - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_entry( - datacatalog.CreateEntryRequest(), - parent='parent_value', - entry_id='entry_id_value', - entry=datacatalog.Entry(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateEntryRequest, - dict, -]) -def test_update_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - client.update_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - -@pytest.mark.asyncio -async def test_update_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_entry_async_from_dict(): - await test_update_entry_async(request_type=dict) - - -def test_update_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryRequest() - - request.entry.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateEntryRequest() - - request.entry.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.update_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'entry.name=name_value', - ) in kw['metadata'] - - -def test_update_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_entry( - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_entry( - datacatalog.UpdateEntryRequest(), - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_entry( - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].entry - mock_val = datacatalog.Entry(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_entry( - datacatalog.UpdateEntryRequest(), - entry=datacatalog.Entry(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteEntryRequest, - dict, -]) -def test_delete_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - client.delete_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - -@pytest.mark.asyncio -async def test_delete_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteEntryRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_entry_async_from_dict(): - await test_delete_entry_async(request_type=dict) - - -def test_delete_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = None - client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_entry_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_entry( - datacatalog.DeleteEntryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_entry_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_entry( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_entry_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_entry( - datacatalog.DeleteEntryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetEntryRequest, - dict, -]) -def test_get_entry(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - type_=datacatalog.EntryType.TABLE, - integrated_system=common.IntegratedSystem.BIGQUERY, - ) - response = client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_entry_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - client.get_entry() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - -@pytest.mark.asyncio -async def test_get_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetEntryRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry( - name='name_value', - linked_resource='linked_resource_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetEntryRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, datacatalog.Entry) - assert response.name == 'name_value' - assert response.linked_resource == 'linked_resource_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_entry_async_from_dict(): - await test_get_entry_async(request_type=dict) - - -def test_get_entry_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = datacatalog.Entry() - client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_entry_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetEntryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry()) - await client.get_entry(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_entry_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_entry), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.Entry() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_entry(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_entry_flattened_error():
- client = DataCatalogClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_entry(
- datacatalog.GetEntryRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_async():
- client = DataCatalogAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datacatalog.Entry()
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_entry(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_entry_flattened_error_async():
- client = DataCatalogAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_entry(
- datacatalog.GetEntryRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datacatalog.LookupEntryRequest,
- dict,
-])
-def test_lookup_entry(request_type, transport: str = 'grpc'):
- client = DataCatalogClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.lookup_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datacatalog.Entry(
- name='name_value',
- linked_resource='linked_resource_value',
- display_name='display_name_value',
- description='description_value',
- type_=datacatalog.EntryType.TABLE,
- integrated_system=common.IntegratedSystem.BIGQUERY,
- )
- response = client.lookup_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == datacatalog.LookupEntryRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datacatalog.Entry)
- assert response.name == 'name_value'
- assert response.linked_resource == 'linked_resource_value'
- assert response.display_name == 'display_name_value'
- assert response.description == 'description_value'
-
-
-def test_lookup_entry_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DataCatalogClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.lookup_entry),
- '__call__') as call:
- client.lookup_entry()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == datacatalog.LookupEntryRequest()
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async(transport: str = 'grpc_asyncio', request_type=datacatalog.LookupEntryRequest):
- client = DataCatalogAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.lookup_entry),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry(
- name='name_value',
- linked_resource='linked_resource_value',
- display_name='display_name_value',
- description='description_value',
- ))
- response = await client.lookup_entry(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == datacatalog.LookupEntryRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datacatalog.Entry)
- assert response.name == 'name_value'
- assert response.linked_resource == 'linked_resource_value'
- assert response.display_name == 'display_name_value'
- assert response.description == 'description_value'
-
-
-@pytest.mark.asyncio
-async def test_lookup_entry_async_from_dict():
- await test_lookup_entry_async(request_type=dict)
-
-
-@pytest.mark.parametrize("request_type", [
- datacatalog.ListEntriesRequest,
- dict,
-])
-def test_list_entries(request_type, transport: str = 'grpc'):
- client = DataCatalogClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_entries),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datacatalog.ListEntriesResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_entries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == datacatalog.ListEntriesRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListEntriesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_entries_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = DataCatalogClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - client.list_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() - -@pytest.mark.asyncio -async def test_list_entries_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListEntriesRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListEntriesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_entries_async_from_dict(): - await test_list_entries_async(request_type=dict) - - -def test_list_entries_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = datacatalog.ListEntriesResponse() - client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_entries_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListEntriesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) - await client.list_entries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_entries_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntriesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_entries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_entries_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_entries( - datacatalog.ListEntriesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_entries_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListEntriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListEntriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_entries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_entries_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_entries( - datacatalog.ListEntriesRequest(), - parent='parent_value', - ) - - -def test_list_entries_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_entries(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datacatalog.Entry) - for i in results) -def test_list_entries_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - pages = list(client.list_entries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_entries_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_entries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datacatalog.Entry) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_entries_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_entries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - datacatalog.Entry(), - ], - next_page_token='abc', - ), - datacatalog.ListEntriesResponse( - entries=[], - next_page_token='def', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - ], - next_page_token='ghi', - ), - datacatalog.ListEntriesResponse( - entries=[ - datacatalog.Entry(), - datacatalog.Entry(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_entries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagTemplateRequest, - dict, -]) -def test_create_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - ) - response = client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_create_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - client.create_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - -@pytest.mark.asyncio -async def test_create_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - )) - response = await client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_create_tag_template_async_from_dict(): - await test_create_tag_template_async(request_type=dict) - - -def test_create_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.create_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_tag_template( - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_id - mock_val = 'tag_template_id_value' - assert arg == mock_val - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - - -def test_create_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_tag_template( - datacatalog.CreateTagTemplateRequest(), - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_tag_template( - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_id - mock_val = 'tag_template_id_value' - assert arg == mock_val - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag_template( - datacatalog.CreateTagTemplateRequest(), - parent='parent_value', - tag_template_id='tag_template_id_value', - tag_template=tags.TagTemplate(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.GetTagTemplateRequest, - dict, -]) -def test_get_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - ) - response = client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_get_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - client.get_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - -@pytest.mark.asyncio -async def test_get_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.GetTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - )) - response = await client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.GetTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_get_tag_template_async_from_dict(): - await test_get_tag_template_async(request_type=dict) - - -def test_get_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.GetTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.GetTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.get_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_tag_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_tag_template( - datacatalog.GetTagTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_tag_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_tag_template( - datacatalog.GetTagTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagTemplateRequest, - dict, -]) -def test_update_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate( - name='name_value', - display_name='display_name_value', - ) - response = client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -def test_update_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - client.update_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - -@pytest.mark.asyncio -async def test_update_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate( - name='name_value', - display_name='display_name_value', - )) - response = await client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, tags.TagTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - - -@pytest.mark.asyncio -async def test_update_tag_template_async_from_dict(): - await test_update_tag_template_async(request_type=dict) - - -def test_update_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateRequest() - - request.tag_template.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - call.return_value = tags.TagTemplate() - client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag_template.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateRequest() - - request.tag_template.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - await client.update_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag_template.name=name_value', - ) in kw['metadata'] - - -def test_update_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag_template( - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_tag_template( - datacatalog.UpdateTagTemplateRequest(), - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag_template( - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].tag_template - mock_val = tags.TagTemplate(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_tag_template( - datacatalog.UpdateTagTemplateRequest(), - tag_template=tags.TagTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagTemplateRequest, - dict, -]) -def test_delete_tag_template(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - client.delete_tag_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_tag_template_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_template_async_from_dict(): - await test_delete_tag_template_async(request_type=dict) - - -def test_delete_tag_template_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - call.return_value = None - client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_template_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag_template( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - - -def test_delete_tag_template_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag_template( - datacatalog.DeleteTagTemplateRequest(), - name='name_value', - force=True, - ) - -@pytest.mark.asyncio -async def test_delete_tag_template_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag_template( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_template_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag_template( - datacatalog.DeleteTagTemplateRequest(), - name='name_value', - force=True, - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagTemplateFieldRequest, - dict, -]) -def test_create_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_create_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - client.create_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_create_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_create_tag_template_field_async_from_dict(): - await test_create_tag_template_field_async(request_type=dict) - - -def test_create_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateFieldRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagTemplateFieldRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.create_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_tag_template_field( - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_field_id - mock_val = 'tag_template_field_id_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - - -def test_create_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_tag_template_field( - datacatalog.CreateTagTemplateFieldRequest(), - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_tag_template_field( - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag_template_field_id - mock_val = 'tag_template_field_id_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag_template_field( - datacatalog.CreateTagTemplateFieldRequest(), - parent='parent_value', - tag_template_field_id='tag_template_field_id_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagTemplateFieldRequest, - dict, -]) -def test_update_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_update_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - client.update_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_update_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_update_tag_template_field_async_from_dict(): - await test_update_tag_template_field_async(request_type=dict) - - -def test_update_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.update_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag_template_field( - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_tag_template_field( - datacatalog.UpdateTagTemplateFieldRequest(), - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag_template_field( - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].tag_template_field - mock_val = tags.TagTemplateField(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_tag_template_field( - datacatalog.UpdateTagTemplateFieldRequest(), - name='name_value', - tag_template_field=tags.TagTemplateField(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.RenameTagTemplateFieldRequest, - dict, -]) -def test_rename_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_rename_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - client.rename_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_rename_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_async_from_dict(): - await test_rename_tag_template_field_async(request_type=dict) - - -def test_rename_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.rename_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_tag_template_field( - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_tag_template_field_id - mock_val = 'new_tag_template_field_id_value' - assert arg == mock_val - - -def test_rename_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rename_tag_template_field( - datacatalog.RenameTagTemplateFieldRequest(), - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - -@pytest.mark.asyncio -async def test_rename_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_tag_template_field( - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_tag_template_field_id - mock_val = 'new_tag_template_field_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_tag_template_field( - datacatalog.RenameTagTemplateFieldRequest(), - name='name_value', - new_tag_template_field_id='new_tag_template_field_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.RenameTagTemplateFieldEnumValueRequest, - dict, -]) -def test_rename_tag_template_field_enum_value(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - ) - response = client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -def test_rename_tag_template_field_enum_value_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - client.rename_tag_template_field_enum_value() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_async(transport: str = 'grpc_asyncio', request_type=datacatalog.RenameTagTemplateFieldEnumValueRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField( - name='name_value', - display_name='display_name_value', - is_required=True, - description='description_value', - order=540, - )) - response = await client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.RenameTagTemplateFieldEnumValueRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.TagTemplateField) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.is_required is True - assert response.description == 'description_value' - assert response.order == 540 - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_async_from_dict(): - await test_rename_tag_template_field_enum_value_async(request_type=dict) - - -def test_rename_tag_template_field_enum_value_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = datacatalog.RenameTagTemplateFieldEnumValueRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - call.return_value = tags.TagTemplateField() - client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.RenameTagTemplateFieldEnumValueRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - await client.rename_tag_template_field_enum_value(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_tag_template_field_enum_value_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_tag_template_field_enum_value( - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_enum_value_display_name - mock_val = 'new_enum_value_display_name_value' - assert arg == mock_val - - -def test_rename_tag_template_field_enum_value_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rename_tag_template_field_enum_value( - datacatalog.RenameTagTemplateFieldEnumValueRequest(), - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rename_tag_template_field_enum_value), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.TagTemplateField() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplateField()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_tag_template_field_enum_value( - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_enum_value_display_name - mock_val = 'new_enum_value_display_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_tag_template_field_enum_value_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_tag_template_field_enum_value( - datacatalog.RenameTagTemplateFieldEnumValueRequest(), - name='name_value', - new_enum_value_display_name='new_enum_value_display_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagTemplateFieldRequest, - dict, -]) -def test_delete_tag_template_field(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_template_field_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - client.delete_tag_template_field() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - -@pytest.mark.asyncio -async def test_delete_tag_template_field_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagTemplateFieldRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagTemplateFieldRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_async_from_dict(): - await test_delete_tag_template_field_async(request_type=dict) - - -def test_delete_tag_template_field_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - call.return_value = None - client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_template_field_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagTemplateFieldRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag_template_field(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_template_field_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag_template_field( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - - -def test_delete_tag_template_field_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag_template_field( - datacatalog.DeleteTagTemplateFieldRequest(), - name='name_value', - force=True, - ) - -@pytest.mark.asyncio -async def test_delete_tag_template_field_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag_template_field), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag_template_field( - name='name_value', - force=True, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].force - mock_val = True - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_template_field_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag_template_field( - datacatalog.DeleteTagTemplateFieldRequest(), - name='name_value', - force=True, - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.CreateTagRequest, - dict, -]) -def test_create_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - column='column_value', - ) - response = client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -def test_create_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - client.create_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - -@pytest.mark.asyncio -async def test_create_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.CreateTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - )) - response = await client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.CreateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -@pytest.mark.asyncio -async def test_create_tag_async_from_dict(): - await test_create_tag_async(request_type=dict) - - -def test_create_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - call.return_value = tags.Tag() - client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.CreateTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.create_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_tag( - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - - -def test_create_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_tag( - datacatalog.CreateTagRequest(), - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_tag( - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_tag( - datacatalog.CreateTagRequest(), - parent='parent_value', - tag=tags.Tag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.UpdateTagRequest, - dict, -]) -def test_update_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - column='column_value', - ) - response = client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -def test_update_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - client.update_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - -@pytest.mark.asyncio -async def test_update_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.UpdateTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag( - name='name_value', - template='template_value', - template_display_name='template_display_name_value', - )) - response = await client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.UpdateTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, tags.Tag) - assert response.name == 'name_value' - assert response.template == 'template_value' - assert response.template_display_name == 'template_display_name_value' - - -@pytest.mark.asyncio -async def test_update_tag_async_from_dict(): - await test_update_tag_async(request_type=dict) - - -def test_update_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagRequest() - - request.tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - call.return_value = tags.Tag() - client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.UpdateTagRequest() - - request.tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - await client.update_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'tag.name=name_value', - ) in kw['metadata'] - - -def test_update_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_tag( - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_tag( - datacatalog.UpdateTagRequest(), - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = tags.Tag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_tag( - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].tag - mock_val = tags.Tag(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_tag( - datacatalog.UpdateTagRequest(), - tag=tags.Tag(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.DeleteTagRequest, - dict, -]) -def test_delete_tag(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - client.delete_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - -@pytest.mark.asyncio -async def test_delete_tag_async(transport: str = 'grpc_asyncio', request_type=datacatalog.DeleteTagRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.DeleteTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_tag_async_from_dict(): - await test_delete_tag_async(request_type=dict) - - -def test_delete_tag_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - call.return_value = None - client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_tag_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.DeleteTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_tag_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_tag_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_tag( - datacatalog.DeleteTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_tag_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_tag_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_tag( - datacatalog.DeleteTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datacatalog.ListTagsRequest, - dict, -]) -def test_list_tags(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTagsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tags_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - client.list_tags() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - -@pytest.mark.asyncio -async def test_list_tags_async(transport: str = 'grpc_asyncio', request_type=datacatalog.ListTagsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == datacatalog.ListTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTagsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tags_async_from_dict(): - await test_list_tags_async(request_type=dict) - - -def test_list_tags_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - call.return_value = datacatalog.ListTagsResponse() - client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tags_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datacatalog.ListTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) - await client.list_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tags_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tags_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tags( - datacatalog.ListTagsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tags_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datacatalog.ListTagsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.ListTagsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tags_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tags( - datacatalog.ListTagsRequest(), - parent='parent_value', - ) - - -def test_list_tags_pager(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tags(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, tags.Tag) - for i in results) -def test_list_tags_pages(transport_name: str = "grpc"): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tags(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tags_async_pager(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tags(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, tags.Tag) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tags_async_pages(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - tags.Tag(), - ], - next_page_token='abc', - ), - datacatalog.ListTagsResponse( - tags=[], - next_page_token='def', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - ], - next_page_token='ghi', - ), - datacatalog.ListTagsResponse( - tags=[ - tags.Tag(), - tags.Tag(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_set_iam_policy_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.set_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_set_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_get_iam_policy_flattened_error(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_error_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataCatalogClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataCatalogGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = DataCatalogClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataCatalogGrpcTransport, - ) - -def test_data_catalog_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataCatalogTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_catalog_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataCatalogTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'search_catalog', - 'create_entry_group', - 'update_entry_group', - 'get_entry_group', - 'delete_entry_group', - 'list_entry_groups', - 'create_entry', - 'update_entry', - 'delete_entry', - 'get_entry', - 'lookup_entry', - 'list_entries', - 'create_tag_template', - 'get_tag_template', - 'update_tag_template', - 'delete_tag_template', - 'create_tag_template_field', - 'update_tag_template_field', - 'rename_tag_template_field', - 'rename_tag_template_field_enum_value', - 'delete_tag_template_field', - 'create_tag', - 'update_tag', - 'delete_tag', - 'list_tags', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_catalog_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataCatalogTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_catalog_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataCatalogTransport() - adc.assert_called_once() - - -def test_data_catalog_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataCatalogClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, - ], -) -def test_data_catalog_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, - ], -) -def test_data_catalog_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataCatalogGrpcTransport, grpc_helpers), - (transports.DataCatalogGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_catalog_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_catalog_host_no_port(transport_name): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_data_catalog_host_with_port(transport_name): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_data_catalog_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataCatalogGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_data_catalog_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.DataCatalogGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport]) -def test_data_catalog_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_entry_path(): - project = "squid" - location = "clam" - entry_group = "whelk" - entry = "octopus" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(project=project, location=location, entry_group=entry_group, entry=entry, ) - actual = DataCatalogClient.entry_path(project, location, entry_group, entry) - assert expected == actual - - -def test_parse_entry_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "entry_group": "cuttlefish", - "entry": "mussel", - } - path = DataCatalogClient.entry_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_entry_path(path) - assert expected == actual - -def test_entry_group_path(): - project = "winkle" - location = "nautilus" - entry_group = "scallop" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(project=project, location=location, entry_group=entry_group, ) - actual = DataCatalogClient.entry_group_path(project, location, entry_group) - assert expected == actual - - -def test_parse_entry_group_path(): - expected = { - "project": "abalone", - "location": "squid", - "entry_group": "clam", - } - path = DataCatalogClient.entry_group_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_entry_group_path(path) - assert expected == actual - -def test_tag_path(): - project = "whelk" - location = "octopus" - entry_group = "oyster" - entry = "nudibranch" - tag = "cuttlefish" - expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(project=project, location=location, entry_group=entry_group, entry=entry, tag=tag, ) - actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag) - assert expected == actual - - -def test_parse_tag_path(): - expected = { - "project": "mussel", - "location": "winkle", - "entry_group": "nautilus", - "entry": "scallop", - "tag": "abalone", - } - path = DataCatalogClient.tag_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_path(path) - assert expected == actual - -def test_tag_template_path(): - project = "squid" - location = "clam" - tag_template = "whelk" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(project=project, location=location, tag_template=tag_template, ) - actual = DataCatalogClient.tag_template_path(project, location, tag_template) - assert expected == actual - - -def test_parse_tag_template_path(): - expected = { - "project": "octopus", - "location": "oyster", - "tag_template": "nudibranch", - } - path = DataCatalogClient.tag_template_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_path(path) - assert expected == actual - -def test_tag_template_field_path(): - project = "cuttlefish" - location = "mussel" - tag_template = "winkle" - field = "nautilus" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(project=project, location=location, tag_template=tag_template, field=field, ) - actual = DataCatalogClient.tag_template_field_path(project, location, tag_template, field) - assert expected == actual - - -def test_parse_tag_template_field_path(): - expected = { - "project": "scallop", - "location": "abalone", - "tag_template": "squid", - "field": "clam", - } - path = DataCatalogClient.tag_template_field_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_tag_template_field_path(path) - assert expected == actual - -def test_tag_template_field_enum_value_path(): - project = "whelk" - location = "octopus" - tag_template = "oyster" - tag_template_field_id = "nudibranch" - enum_value_display_name = "cuttlefish" - expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{tag_template_field_id}/enumValues/{enum_value_display_name}".format(project=project, location=location, tag_template=tag_template, tag_template_field_id=tag_template_field_id, enum_value_display_name=enum_value_display_name, ) - actual = DataCatalogClient.tag_template_field_enum_value_path(project, location, tag_template, tag_template_field_id, enum_value_display_name) - assert expected == actual - - -def test_parse_tag_template_field_enum_value_path(): - expected = { - "project": "mussel", - "location": "winkle", - "tag_template": "nautilus", - "tag_template_field_id": "scallop", - "enum_value_display_name": "abalone", - } - path = DataCatalogClient.tag_template_field_enum_value_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_tag_template_field_enum_value_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataCatalogClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DataCatalogClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataCatalogClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DataCatalogClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataCatalogClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DataCatalogClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = DataCatalogClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DataCatalogClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataCatalogClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataCatalogClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DataCatalogClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataCatalogClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataCatalogTransport, '_prep_wrapped_messages') as prep: - transport_class = DataCatalogClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = DataCatalogAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataCatalogClient, transports.DataCatalogGrpcTransport), - (DataCatalogAsyncClient, transports.DataCatalogGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py deleted file mode 100644 index f18f350ff488..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ /dev/null @@ -1,4521 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import PolicyTagManagerAsyncClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import PolicyTagManagerClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import pagers -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager import transports -from google.cloud.datacatalog_v1beta1.types import common -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import timestamps -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PolicyTagManagerClient._get_default_mtls_endpoint(None) is None - assert PolicyTagManagerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerClient, "grpc"), - (PolicyTagManagerAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PolicyTagManagerGrpcTransport, "grpc"), - (transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerClient, "grpc"), - (PolicyTagManagerAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_policy_tag_manager_client_get_transport_class(): - transport = PolicyTagManagerClient.get_transport_class() - 
available_transports = [ - transports.PolicyTagManagerGrpcTransport, - ] - assert transport in available_transports - - transport = PolicyTagManagerClient.get_transport_class("grpc") - assert transport == transports.PolicyTagManagerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -def test_policy_tag_manager_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PolicyTagManagerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "true"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", "false"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_policy_tag_manager_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PolicyTagManagerClient, PolicyTagManagerAsyncClient -]) -@mock.patch.object(PolicyTagManagerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerClient)) -@mock.patch.object(PolicyTagManagerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerAsyncClient)) -def test_policy_tag_manager_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc"), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_policy_tag_manager_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PolicyTagManagerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.CreateTaxonomyRequest, - dict, -]) -def test_create_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_create_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - client.create_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - -@pytest.mark.asyncio -async def test_create_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreateTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_create_taxonomy_async_from_dict(): - await test_create_taxonomy_async(request_type=dict) - - -def test_create_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreateTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreateTaxonomyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.create_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_taxonomy( - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - - -def test_create_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_taxonomy( - policytagmanager.CreateTaxonomyRequest(), - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_taxonomy( - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_taxonomy( - policytagmanager.CreateTaxonomyRequest(), - parent='parent_value', - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.DeleteTaxonomyRequest, - dict, -]) -def test_delete_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - client.delete_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - -@pytest.mark.asyncio -async def test_delete_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeleteTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeleteTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_taxonomy_async_from_dict(): - await test_delete_taxonomy_async(request_type=dict) - - -def test_delete_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeleteTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - call.return_value = None - client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeleteTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_taxonomy( - policytagmanager.DeleteTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_taxonomy( - policytagmanager.DeleteTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.UpdateTaxonomyRequest, - dict, -]) -def test_update_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_update_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - client.update_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - -@pytest.mark.asyncio -async def test_update_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdateTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdateTaxonomyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_update_taxonomy_async_from_dict(): - await test_update_taxonomy_async(request_type=dict) - - -def test_update_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdateTaxonomyRequest() - - request.taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'taxonomy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdateTaxonomyRequest() - - request.taxonomy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.update_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'taxonomy.name=name_value', - ) in kw['metadata'] - - -def test_update_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_taxonomy( - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - - -def test_update_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_taxonomy( - policytagmanager.UpdateTaxonomyRequest(), - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_taxonomy( - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].taxonomy - mock_val = policytagmanager.Taxonomy(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_taxonomy( - policytagmanager.UpdateTaxonomyRequest(), - taxonomy=policytagmanager.Taxonomy(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.ListTaxonomiesRequest, - dict, -]) -def test_list_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTaxonomiesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - client.list_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_list_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListTaxonomiesRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTaxonomiesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_taxonomies_async_from_dict(): - await test_list_taxonomies_async(request_type=dict) - - -def test_list_taxonomies_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - call.return_value = policytagmanager.ListTaxonomiesResponse() - client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_taxonomies_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) - await client.list_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_taxonomies_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_taxonomies_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_taxonomies( - policytagmanager.ListTaxonomiesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_taxonomies_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListTaxonomiesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListTaxonomiesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_taxonomies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_taxonomies_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_taxonomies( - policytagmanager.ListTaxonomiesRequest(), - parent='parent_value', - ) - - -def test_list_taxonomies_pager(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_taxonomies(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, policytagmanager.Taxonomy) - for i in results) -def test_list_taxonomies_pages(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_taxonomies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_taxonomies_async_pager(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_taxonomies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, policytagmanager.Taxonomy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_taxonomies_async_pages(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_taxonomies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - next_page_token='abc', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[], - next_page_token='def', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - ], - next_page_token='ghi', - ), - policytagmanager.ListTaxonomiesResponse( - taxonomies=[ - policytagmanager.Taxonomy(), - policytagmanager.Taxonomy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_taxonomies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - policytagmanager.GetTaxonomyRequest, - dict, -]) -def test_get_taxonomy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - ) - response = client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -def test_get_taxonomy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - client.get_taxonomy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - -@pytest.mark.asyncio -async def test_get_taxonomy_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetTaxonomyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy( - name='name_value', - display_name='display_name_value', - description='description_value', - policy_tag_count=1715, - activated_policy_types=[policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL], - )) - response = await client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetTaxonomyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.Taxonomy) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.policy_tag_count == 1715 - assert response.activated_policy_types == [policytagmanager.Taxonomy.PolicyType.FINE_GRAINED_ACCESS_CONTROL] - - -@pytest.mark.asyncio -async def test_get_taxonomy_async_from_dict(): - await test_get_taxonomy_async(request_type=dict) - - -def test_get_taxonomy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - call.return_value = policytagmanager.Taxonomy() - client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_taxonomy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetTaxonomyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - await client.get_taxonomy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_taxonomy_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_taxonomy_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_taxonomy( - policytagmanager.GetTaxonomyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_taxonomy_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_taxonomy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.Taxonomy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.Taxonomy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_taxonomy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_taxonomy_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_taxonomy( - policytagmanager.GetTaxonomyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.CreatePolicyTagRequest, - dict, -]) -def test_create_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_create_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - client.create_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - -@pytest.mark.asyncio -async def test_create_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.CreatePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.CreatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_create_policy_tag_async_from_dict(): - await test_create_policy_tag_async(request_type=dict) - - -def test_create_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreatePolicyTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.CreatePolicyTagRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.create_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_policy_tag( - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - - -def test_create_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_policy_tag( - policytagmanager.CreatePolicyTagRequest(), - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_policy_tag( - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_policy_tag( - policytagmanager.CreatePolicyTagRequest(), - parent='parent_value', - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.DeletePolicyTagRequest, - dict, -]) -def test_delete_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - client.delete_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - -@pytest.mark.asyncio -async def test_delete_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.DeletePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.DeletePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_policy_tag_async_from_dict(): - await test_delete_policy_tag_async(request_type=dict) - - -def test_delete_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeletePolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - call.return_value = None - client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.DeletePolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_policy_tag( - policytagmanager.DeletePolicyTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_policy_tag( - policytagmanager.DeletePolicyTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.UpdatePolicyTagRequest, - dict, -]) -def test_update_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_update_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - client.update_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - -@pytest.mark.asyncio -async def test_update_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.UpdatePolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.UpdatePolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_update_policy_tag_async_from_dict(): - await test_update_policy_tag_async(request_type=dict) - - -def test_update_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = policytagmanager.UpdatePolicyTagRequest() - - request.policy_tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy_tag.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.UpdatePolicyTagRequest() - - request.policy_tag.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.update_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy_tag.name=name_value', - ) in kw['metadata'] - - -def test_update_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_policy_tag( - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - - -def test_update_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_policy_tag( - policytagmanager.UpdatePolicyTagRequest(), - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_policy_tag( - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].policy_tag - mock_val = policytagmanager.PolicyTag(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_policy_tag( - policytagmanager.UpdatePolicyTagRequest(), - policy_tag=policytagmanager.PolicyTag(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanager.ListPolicyTagsRequest, - dict, -]) -def test_list_policy_tags(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPolicyTagsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_policy_tags_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - client.list_policy_tags() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - -@pytest.mark.asyncio -async def test_list_policy_tags_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.ListPolicyTagsRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.ListPolicyTagsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPolicyTagsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_policy_tags_async_from_dict(): - await test_list_policy_tags_async(request_type=dict) - - -def test_list_policy_tags_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListPolicyTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - call.return_value = policytagmanager.ListPolicyTagsResponse() - client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_policy_tags_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.ListPolicyTagsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) - await client.list_policy_tags(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_policy_tags_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_policy_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_policy_tags_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_policy_tags( - policytagmanager.ListPolicyTagsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_policy_tags_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.ListPolicyTagsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.ListPolicyTagsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_policy_tags( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_policy_tags_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_policy_tags( - policytagmanager.ListPolicyTagsRequest(), - parent='parent_value', - ) - - -def test_list_policy_tags_pager(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_policy_tags(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, policytagmanager.PolicyTag) - for i in results) -def test_list_policy_tags_pages(transport_name: str = "grpc"): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - pages = list(client.list_policy_tags(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_policy_tags_async_pager(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_policy_tags(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, policytagmanager.PolicyTag) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_policy_tags_async_pages(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_policy_tags), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - next_page_token='abc', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[], - next_page_token='def', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - ], - next_page_token='ghi', - ), - policytagmanager.ListPolicyTagsResponse( - policy_tags=[ - policytagmanager.PolicyTag(), - policytagmanager.PolicyTag(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_policy_tags(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - policytagmanager.GetPolicyTagRequest, - dict, -]) -def test_get_policy_tag(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - ) - response = client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -def test_get_policy_tag_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - client.get_policy_tag() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - -@pytest.mark.asyncio -async def test_get_policy_tag_async(transport: str = 'grpc_asyncio', request_type=policytagmanager.GetPolicyTagRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag( - name='name_value', - display_name='display_name_value', - description='description_value', - parent_policy_tag='parent_policy_tag_value', - child_policy_tags=['child_policy_tags_value'], - )) - response = await client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanager.GetPolicyTagRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanager.PolicyTag) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.parent_policy_tag == 'parent_policy_tag_value' - assert response.child_policy_tags == ['child_policy_tags_value'] - - -@pytest.mark.asyncio -async def test_get_policy_tag_async_from_dict(): - await test_get_policy_tag_async(request_type=dict) - - -def test_get_policy_tag_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetPolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - call.return_value = policytagmanager.PolicyTag() - client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_policy_tag_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanager.GetPolicyTagRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - await client.get_policy_tag(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_policy_tag_flattened(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_policy_tag_flattened_error(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_policy_tag( - policytagmanager.GetPolicyTagRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_policy_tag_flattened_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_policy_tag), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanager.PolicyTag() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanager.PolicyTag()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_policy_tag( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_policy_tag_flattened_error_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_policy_tag( - policytagmanager.GetPolicyTagRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - - -def test_get_iam_policy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - - -def test_set_iam_policy_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - - -def test_test_iam_permissions_field_headers(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PolicyTagManagerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = PolicyTagManagerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PolicyTagManagerGrpcTransport, - ) - -def test_policy_tag_manager_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PolicyTagManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_policy_tag_manager_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PolicyTagManagerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_taxonomy', - 'delete_taxonomy', - 'update_taxonomy', - 'list_taxonomies', - 'get_taxonomy', - 'create_policy_tag', - 'delete_policy_tag', - 'update_policy_tag', - 'list_policy_tags', - 'get_policy_tag', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_policy_tag_manager_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_policy_tag_manager_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerTransport() - adc.assert_called_once() - - -def test_policy_tag_manager_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PolicyTagManagerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerGrpcTransport, - transports.PolicyTagManagerGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PolicyTagManagerGrpcTransport, grpc_helpers), - (transports.PolicyTagManagerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_policy_tag_manager_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_host_no_port(transport_name): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_host_with_port(transport_name): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_policy_tag_manager_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_policy_tag_manager_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerGrpcTransport, transports.PolicyTagManagerGrpcAsyncIOTransport]) -def test_policy_tag_manager_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_policy_tag_path(): - project = "squid" - location = "clam" - taxonomy = "whelk" - policy_tag = "octopus" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag, ) - actual = PolicyTagManagerClient.policy_tag_path(project, location, taxonomy, policy_tag) - assert expected == actual - - -def test_parse_policy_tag_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "taxonomy": "cuttlefish", - "policy_tag": "mussel", - } - path = PolicyTagManagerClient.policy_tag_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_policy_tag_path(path) - assert expected == actual - -def test_taxonomy_path(): - project = "winkle" - location = "nautilus" - taxonomy = "scallop" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - actual = PolicyTagManagerClient.taxonomy_path(project, location, taxonomy) - assert expected == actual - - -def test_parse_taxonomy_path(): - expected = { - "project": "abalone", - "location": "squid", - "taxonomy": "clam", - } - path = PolicyTagManagerClient.taxonomy_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PolicyTagManagerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = PolicyTagManagerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = PolicyTagManagerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = PolicyTagManagerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PolicyTagManagerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = PolicyTagManagerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = PolicyTagManagerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = PolicyTagManagerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PolicyTagManagerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = PolicyTagManagerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PolicyTagManagerTransport, '_prep_wrapped_messages') as prep: - transport_class = PolicyTagManagerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PolicyTagManagerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = PolicyTagManagerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PolicyTagManagerClient, transports.PolicyTagManagerGrpcTransport), - (PolicyTagManagerAsyncClient, transports.PolicyTagManagerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py deleted file mode 100644 index 88d1769958e7..000000000000 --- a/owl-bot-staging/google-cloud-datacatalog/v1beta1/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ /dev/null @@ -1,1456 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationAsyncClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import PolicyTagManagerSerializationClient -from google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization import transports -from google.cloud.datacatalog_v1beta1.types import policytagmanager -from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(None) is None - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PolicyTagManagerSerializationClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerSerializationClient, "grpc"), - (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PolicyTagManagerSerializationClient, "grpc"), - (PolicyTagManagerSerializationAsyncClient, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - 
assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - - -def test_policy_tag_manager_serialization_client_get_transport_class(): - transport = PolicyTagManagerSerializationClient.get_transport_class() - available_transports = [ - transports.PolicyTagManagerSerializationGrpcTransport, - ] - assert transport in available_transports - - transport = PolicyTagManagerSerializationClient.get_transport_class("grpc") - assert transport == transports.PolicyTagManagerSerializationGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -def test_policy_tag_manager_serialization_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PolicyTagManagerSerializationClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "true"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", "false"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_policy_tag_manager_serialization_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PolicyTagManagerSerializationClient, PolicyTagManagerSerializationAsyncClient -]) -@mock.patch.object(PolicyTagManagerSerializationClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationClient)) -@mock.patch.object(PolicyTagManagerSerializationAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PolicyTagManagerSerializationAsyncClient)) -def test_policy_tag_manager_serialization_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc"), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_policy_tag_manager_serialization_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_serialization_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_policy_tag_manager_serialization_client_client_options_from_dict(): - with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PolicyTagManagerSerializationClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport, "grpc", grpc_helpers), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_policy_tag_manager_serialization_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - policytagmanagerserialization.ImportTaxonomiesRequest, - dict, -]) -def test_import_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse( - ) - response = client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) - - -def test_import_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - client.import_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_import_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ImportTaxonomiesRequest): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse( - )) - response = await client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse) - - -@pytest.mark.asyncio -async def test_import_taxonomies_async_from_dict(): - await test_import_taxonomies_async(request_type=dict) - - -def test_import_taxonomies_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ImportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse() - client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_taxonomies_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ImportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ImportTaxonomiesResponse()) - await client.import_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - policytagmanagerserialization.ExportTaxonomiesRequest, - dict, -]) -def test_export_taxonomies(request_type, transport: str = 'grpc'): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse( - ) - response = client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) - - -def test_export_taxonomies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - client.export_taxonomies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - -@pytest.mark.asyncio -async def test_export_taxonomies_async(transport: str = 'grpc_asyncio', request_type=policytagmanagerserialization.ExportTaxonomiesRequest): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse( - )) - response = await client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse) - - -@pytest.mark.asyncio -async def test_export_taxonomies_async_from_dict(): - await test_export_taxonomies_async(request_type=dict) - - -def test_export_taxonomies_field_headers(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ExportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse() - client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_taxonomies_field_headers_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = policytagmanagerserialization.ExportTaxonomiesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_taxonomies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policytagmanagerserialization.ExportTaxonomiesResponse()) - await client.export_taxonomies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PolicyTagManagerSerializationClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PolicyTagManagerSerializationClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.PolicyTagManagerSerializationGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = PolicyTagManagerSerializationClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PolicyTagManagerSerializationGrpcTransport, - ) - -def test_policy_tag_manager_serialization_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PolicyTagManagerSerializationTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_policy_tag_manager_serialization_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PolicyTagManagerSerializationTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'import_taxonomies', - 'export_taxonomies', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_policy_tag_manager_serialization_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerSerializationTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_policy_tag_manager_serialization_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PolicyTagManagerSerializationTransport() - adc.assert_called_once() - - -def test_policy_tag_manager_serialization_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PolicyTagManagerSerializationClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_serialization_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PolicyTagManagerSerializationGrpcTransport, - transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, - ], -) -def test_policy_tag_manager_serialization_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PolicyTagManagerSerializationGrpcTransport, grpc_helpers), - (transports.PolicyTagManagerSerializationGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_policy_tag_manager_serialization_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "datacatalog.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="datacatalog.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_serialization_host_no_port(transport_name): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_policy_tag_manager_serialization_host_with_port(transport_name): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='datacatalog.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'datacatalog.googleapis.com:8000' - ) - -def test_policy_tag_manager_serialization_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerSerializationGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PolicyTagManagerSerializationGrpcTransport, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport]) -def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_taxonomy_path(): - project = "squid" - location = "clam" - taxonomy = "whelk" - expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(project=project, location=location, taxonomy=taxonomy, ) - actual = PolicyTagManagerSerializationClient.taxonomy_path(project, location, taxonomy) - assert expected == actual - - -def test_parse_taxonomy_path(): - expected = { - "project": "octopus", - "location": "oyster", - "taxonomy": "nudibranch", - } - path = PolicyTagManagerSerializationClient.taxonomy_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PolicyTagManagerSerializationClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = PolicyTagManagerSerializationClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = PolicyTagManagerSerializationClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PolicyTagManagerSerializationClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = PolicyTagManagerSerializationClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = PolicyTagManagerSerializationClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = PolicyTagManagerSerializationClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PolicyTagManagerSerializationClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PolicyTagManagerSerializationClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = PolicyTagManagerSerializationClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = PolicyTagManagerSerializationClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PolicyTagManagerSerializationTransport, '_prep_wrapped_messages') as prep: - transport_class = PolicyTagManagerSerializationClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PolicyTagManagerSerializationAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = PolicyTagManagerSerializationClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PolicyTagManagerSerializationClient, transports.PolicyTagManagerSerializationGrpcTransport), - (PolicyTagManagerSerializationAsyncClient, transports.PolicyTagManagerSerializationGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-cloud-datacatalog/CONTRIBUTING.rst b/packages/google-cloud-datacatalog/CONTRIBUTING.rst index 597e8dd95079..8e56646cbb04 100644 --- a/packages/google-cloud-datacatalog/CONTRIBUTING.rst +++ b/packages/google-cloud-datacatalog/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system- -- -k + $ nox -s system-3.11 -- -k .. 
note:: - System tests are only configured to run under Python. + System tests are only configured to run under Python 3.8, 3.9, 3.10 and 3.11. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py index b042e9ed8fb9..7cf5c4650ed6 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py @@ -56,6 +56,7 @@ CreateTagTemplateFieldRequest, CreateTagTemplateRequest, DatabaseTableSpec, + DatasetSpec, DataSourceConnectionSpec, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -81,6 +82,7 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, ReconcileTagsMetadata, @@ -102,6 +104,9 @@ UpdateTagRequest, UpdateTagTemplateFieldRequest, UpdateTagTemplateRequest, + VertexDatasetSpec, + VertexModelSourceInfo, + VertexModelSpec, ) from google.cloud.datacatalog_v1.types.dataplex_spec import ( DataplexExternalTable, @@ -193,6 +198,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "DatabaseTableSpec", + "DatasetSpec", "DataSourceConnectionSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", @@ -217,6 +223,7 @@ "ListTagsResponse", "LookerSystemSpec", "LookupEntryRequest", + "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", "ReconcileTagsMetadata", @@ -238,6 +245,9 @@ "UpdateTagRequest", "UpdateTagTemplateFieldRequest", "UpdateTagTemplateRequest", + "VertexDatasetSpec", + "VertexModelSourceInfo", + "VertexModelSpec", "EntryType", "DataplexExternalTable", "DataplexFilesetSpec", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index dcf8378be674..360a0d13ebdd 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py index 09dfa44860a0..2924918369a4 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py @@ -45,6 +45,7 @@ CreateTagTemplateFieldRequest, CreateTagTemplateRequest, DatabaseTableSpec, + DatasetSpec, DataSourceConnectionSpec, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -70,6 +71,7 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, ReconcileTagsMetadata, @@ -91,6 +93,9 @@ UpdateTagRequest, UpdateTagTemplateFieldRequest, UpdateTagTemplateRequest, + VertexDatasetSpec, + VertexModelSourceInfo, + VertexModelSpec, ) from .types.dataplex_spec import ( DataplexExternalTable, @@ -172,6 +177,7 @@ "DataplexFilesetSpec", "DataplexSpec", "DataplexTableSpec", + "DatasetSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", "DeletePolicyTagRequest", @@ -215,6 +221,7 @@ "LookerSystemSpec", "LookupEntryRequest", "ManagingSystem", + "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", "PersonalDetails", @@ -261,5 +268,8 @@ "UpdateTaxonomyRequest", "UsageSignal", "UsageStats", + "VertexDatasetSpec", + "VertexModelSourceInfo", + "VertexModelSpec", "ViewSpec", ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index dcf8378be674..360a0d13ebdd 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index e8d81173595f..58474633e89c 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -1054,7 +1054,7 @@ async def sample_create_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.CreateEntryRequest( @@ -1200,7 +1200,7 @@ async def sample_update_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.UpdateEntryRequest( diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index d6f5feb4c4d9..1d2c1017a707 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -1391,7 +1391,7 @@ def sample_create_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.CreateEntryRequest( @@ -1537,7 +1537,7 @@ def sample_update_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.UpdateEntryRequest( diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py index 6a75dbf66593..e0becd2f9a53 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py @@ -31,6 +31,7 @@ CreateTagTemplateFieldRequest, CreateTagTemplateRequest, DatabaseTableSpec, + DatasetSpec, DataSourceConnectionSpec, DeleteEntryGroupRequest, DeleteEntryRequest, @@ -56,6 +57,7 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, ReconcileTagsMetadata, @@ -77,6 +79,9 @@ UpdateTagRequest, UpdateTagTemplateFieldRequest, UpdateTagTemplateRequest, + VertexDatasetSpec, + VertexModelSourceInfo, + VertexModelSpec, ) from .dataplex_spec import ( DataplexExternalTable, @@ -146,6 +151,7 @@ "CreateTagTemplateFieldRequest", "CreateTagTemplateRequest", "DatabaseTableSpec", + "DatasetSpec", "DataSourceConnectionSpec", "DeleteEntryGroupRequest", "DeleteEntryRequest", @@ 
-170,6 +176,7 @@ "ListTagsResponse", "LookerSystemSpec", "LookupEntryRequest", + "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", "ReconcileTagsMetadata", @@ -191,6 +198,9 @@ "UpdateTagRequest", "UpdateTagTemplateFieldRequest", "UpdateTagTemplateRequest", + "VertexDatasetSpec", + "VertexModelSourceInfo", + "VertexModelSpec", "EntryType", "DataplexExternalTable", "DataplexFilesetSpec", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py index 9d2c0d09c93c..8ed822abc932 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py @@ -53,6 +53,8 @@ class IntegratedSystem(proto.Enum): Cloud Sql LOOKER (9): Looker + VERTEX_AI (10): + Vertex AI """ INTEGRATED_SYSTEM_UNSPECIFIED = 0 BIGQUERY = 1 @@ -63,6 +65,7 @@ class IntegratedSystem(proto.Enum): CLOUD_BIGTABLE = 7 CLOUD_SQL = 8 LOOKER = 9 + VERTEX_AI = 10 class ManagingSystem(proto.Enum): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py index 70df39034914..70dbe9ea175f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py @@ -52,11 +52,16 @@ "FilesetSpec", "DataSourceConnectionSpec", "RoutineSpec", + "DatasetSpec", "SqlDatabaseSystemSpec", "LookerSystemSpec", "CloudBigtableSystemSpec", "CloudBigtableInstanceSpec", "ServiceSpec", + "VertexModelSourceInfo", + "VertexModelSpec", + "VertexDatasetSpec", + "ModelSpec", "BusinessContext", "EntryOverview", "Contacts", @@ -930,6 +935,10 @@ class Entry(proto.Message): Specification that applies to a user-defined function or procedure. Valid only for entries with the ``ROUTINE`` type. + This field is a member of `oneof`_ ``spec``. + dataset_spec (google.cloud.datacatalog_v1.types.DatasetSpec): + Specification that applies to a dataset. + This field is a member of `oneof`_ ``spec``. fileset_spec (google.cloud.datacatalog_v1.types.FilesetSpec): Specification that applies to a fileset resource. Valid only @@ -940,6 +949,10 @@ class Entry(proto.Message): Specification that applies to a Service resource. + This field is a member of `oneof`_ ``spec``. + model_spec (google.cloud.datacatalog_v1.types.ModelSpec): + Model specification. + This field is a member of `oneof`_ ``spec``. display_name (str): Display name of an entry. @@ -1075,6 +1088,12 @@ class Entry(proto.Message): oneof="spec", message="RoutineSpec", ) + dataset_spec: "DatasetSpec" = proto.Field( + proto.MESSAGE, + number=32, + oneof="spec", + message="DatasetSpec", + ) fileset_spec: "FilesetSpec" = proto.Field( proto.MESSAGE, number=33, @@ -1087,6 +1106,12 @@ class Entry(proto.Message): oneof="spec", message="ServiceSpec", ) + model_spec: "ModelSpec" = proto.Field( + proto.MESSAGE, + number=43, + oneof="spec", + message="ModelSpec", + ) display_name: str = proto.Field( proto.STRING, number=3, @@ -1392,6 +1417,28 @@ class Mode(proto.Enum): ) +class DatasetSpec(proto.Message): + r"""Specification that applies to a dataset. Valid only for entries with + the ``DATASET`` type. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vertex_dataset_spec (google.cloud.datacatalog_v1.types.VertexDatasetSpec): + Vertex AI Dataset specific fields + + This field is a member of `oneof`_ ``system_spec``. + """ + + vertex_dataset_spec: "VertexDatasetSpec" = proto.Field( + proto.MESSAGE, + number=2, + oneof="system_spec", + message="VertexDatasetSpec", + ) + + class SqlDatabaseSystemSpec(proto.Message): r"""Specification that applies to entries that are part ``SQL_DATABASE`` system (user_specified_type) @@ -1566,6 +1613,198 @@ class ServiceSpec(proto.Message): ) +class VertexModelSourceInfo(proto.Message): + r"""Detail description of the source information of a Vertex + model. + + Attributes: + source_type (google.cloud.datacatalog_v1.types.VertexModelSourceInfo.ModelSourceType): + Type of the model source. + copy (bool): + If this Model is copy of another Model. If true then + [source_type][google.cloud.datacatalog.v1.VertexModelSourceInfo.source_type] + pertains to the original. + """ + + class ModelSourceType(proto.Enum): + r"""Source of the model. + + Values: + MODEL_SOURCE_TYPE_UNSPECIFIED (0): + Should not be used. + AUTOML (1): + The Model is uploaded by automl training + pipeline. + CUSTOM (2): + The Model is uploaded by user or custom + training pipeline. + BQML (3): + The Model is registered and sync'ed from + BigQuery ML. + MODEL_GARDEN (4): + The Model is saved or tuned from Model + Garden. + """ + MODEL_SOURCE_TYPE_UNSPECIFIED = 0 + AUTOML = 1 + CUSTOM = 2 + BQML = 3 + MODEL_GARDEN = 4 + + source_type: ModelSourceType = proto.Field( + proto.ENUM, + number=1, + enum=ModelSourceType, + ) + copy: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class VertexModelSpec(proto.Message): + r"""Specification for vertex model resources. + + Attributes: + version_id (str): + The version ID of the model. + version_aliases (MutableSequence[str]): + User provided version aliases so that a model + version can be referenced via alias + version_description (str): + The description of this version. + vertex_model_source_info (google.cloud.datacatalog_v1.types.VertexModelSourceInfo): + Source of a Vertex model. + container_image_uri (str): + URI of the Docker image to be used as the + custom container for serving predictions. + """ + + version_id: str = proto.Field( + proto.STRING, + number=1, + ) + version_aliases: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + version_description: str = proto.Field( + proto.STRING, + number=3, + ) + vertex_model_source_info: "VertexModelSourceInfo" = proto.Field( + proto.MESSAGE, + number=4, + message="VertexModelSourceInfo", + ) + container_image_uri: str = proto.Field( + proto.STRING, + number=5, + ) + + +class VertexDatasetSpec(proto.Message): + r"""Specification for vertex dataset resources. + + Attributes: + data_item_count (int): + The number of DataItems in this Dataset. Only + apply for non-structured Dataset. + data_type (google.cloud.datacatalog_v1.types.VertexDatasetSpec.DataType): + Type of the dataset. + """ + + class DataType(proto.Enum): + r"""Type of data stored in the dataset. + + Values: + DATA_TYPE_UNSPECIFIED (0): + Should not be used. + TABLE (1): + Structured data dataset. + IMAGE (2): + Image dataset which supports + ImageClassification, ImageObjectDetection and + ImageSegmentation problems. + TEXT (3): + Document dataset which supports + TextClassification, TextExtraction and + TextSentiment problems. 
+ VIDEO (4): + Video dataset which supports + VideoClassification, VideoObjectTracking and + VideoActionRecognition problems. + CONVERSATION (5): + Conversation dataset which supports + conversation problems. + TIME_SERIES (6): + TimeSeries dataset. + DOCUMENT (7): + Document dataset which supports + DocumentAnnotation problems. + TEXT_TO_SPEECH (8): + TextToSpeech dataset which supports + TextToSpeech problems. + TRANSLATION (9): + Translation dataset which supports + Translation problems. + STORE_VISION (10): + Store Vision dataset which is used for HITL + integration. + ENTERPRISE_KNOWLEDGE_GRAPH (11): + Enterprise Knowledge Graph dataset which is + used for HITL labeling integration. + TEXT_PROMPT (12): + Text prompt dataset which supports Large + Language Models. + """ + DATA_TYPE_UNSPECIFIED = 0 + TABLE = 1 + IMAGE = 2 + TEXT = 3 + VIDEO = 4 + CONVERSATION = 5 + TIME_SERIES = 6 + DOCUMENT = 7 + TEXT_TO_SPEECH = 8 + TRANSLATION = 9 + STORE_VISION = 10 + ENTERPRISE_KNOWLEDGE_GRAPH = 11 + TEXT_PROMPT = 12 + + data_item_count: int = proto.Field( + proto.INT64, + number=1, + ) + data_type: DataType = proto.Field( + proto.ENUM, + number=2, + enum=DataType, + ) + + +class ModelSpec(proto.Message): + r"""Specification that applies to a model. Valid only for entries with + the ``MODEL`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + vertex_model_spec (google.cloud.datacatalog_v1.types.VertexModelSpec): + Specification for vertex model resources. + + This field is a member of `oneof`_ ``system_spec``. + """ + + vertex_model_spec: "VertexModelSpec" = proto.Field( + proto.MESSAGE, + number=1, + oneof="system_spec", + message="VertexModelSpec", + ) + + class BusinessContext(proto.Message): r"""Business Context of the entry. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index dcf8378be674..360a0d13ebdd 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.15.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py index 3d5b1ea998a9..96afe59f525d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py @@ -37,7 +37,7 @@ class UsageStats(proto.Message): - The usage stats only include BigQuery query jobs - The usage stats might be underestimated, e.g. 
wildcard table references are not yet counted in usage computation - https://cloud.google.com/bigquery/docs/querying-wildcard-tables + https://cloud.google.com/bigquery/docs/querying-wildcard-tables Attributes: total_completions (float): diff --git a/packages/google-cloud-datacatalog/noxfile.py b/packages/google-cloud-datacatalog/noxfile.py index 9a2acd8b6787..be54712bfa8f 100644 --- a/packages/google-cloud-datacatalog/noxfile.py +++ b/packages/google-cloud-datacatalog/noxfile.py @@ -46,7 +46,7 @@ UNIT_TEST_EXTRAS = [] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = [] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -405,24 +405,3 @@ def prerelease_deps(session): session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") - - system_test_path = os.path.join("tests", "system.py") - system_test_folder_path = os.path.join("tests", "system") - - # Only run system tests if found. - if os.path.exists(system_test_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_path, - *session.posargs, - ) - if os.path.exists(system_test_folder_path): - session.run( - "py.test", - "--verbose", - f"--junitxml=system_{session.python}_sponge_log.xml", - system_test_folder_path, - *session.posargs, - ) diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py index 9732587f7f27..54c6f72a5136 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_async.py @@ -41,7 +41,7 @@ async def sample_create_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.CreateEntryRequest( diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py index 5f8749e42087..16317907ef44 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_create_entry_sync.py @@ -41,7 +41,7 @@ def sample_create_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.CreateEntryRequest( diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py index f368603a80f6..edfcd1577e86 100644 --- 
a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_async.py @@ -41,7 +41,7 @@ async def sample_update_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.UpdateEntryRequest( diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py index a4b6f4c48184..ae8cedd82586 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_update_entry_sync.py @@ -41,7 +41,7 @@ def sample_update_entry(): # Initialize request argument(s) entry = datacatalog_v1.Entry() entry.type_ = "LOOK" - entry.integrated_system = "LOOKER" + entry.integrated_system = "VERTEX_AI" entry.gcs_fileset_spec.file_patterns = ['file_patterns_value1', 'file_patterns_value2'] request = datacatalog_v1.UpdateEntryRequest( diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index 187663629fc1..1659a652e62c 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.15.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index 8872ba807a0a..14e0e75feb01 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.15.2" + "version": "0.1.0" }, "snippets": [ { From 5e24e4581f99f784601e3e14145caa8fcdb630ea Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 30 Sep 2023 06:45:02 -0400 Subject: [PATCH 3/3] See https://github.com/googleapis/gapic-generator-python/issues/1722 --- .../google/cloud/datacatalog_v1beta1/types/usage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py index 96afe59f525d..3d5b1ea998a9 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/types/usage.py @@ -37,7 +37,7 @@ class 
UsageStats(proto.Message): - The usage stats only include BigQuery query jobs - The usage stats might be underestimated, e.g. wildcard table references are not yet counted in usage computation - https://cloud.google.com/bigquery/docs/querying-wildcard-tables + https://cloud.google.com/bigquery/docs/querying-wildcard-tables Attributes: total_completions (float):
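As an illustrative aside (not part of the patch itself): a minimal sketch of how the new Vertex AI fields introduced in this change — IntegratedSystem.VERTEX_AI, DatasetSpec, ModelSpec, VertexDatasetSpec, VertexModelSpec, and VertexModelSourceInfo — could be populated with proto-plus messages. It follows the field names defined in the datacatalog.py hunks above and the assignment style used in the regenerated create_entry samples; all literal values (project, entry group, entry ID, version ID, aliases, container image URI, item count) are placeholders, and it assumes a google-cloud-datacatalog release that includes this change. The snippet only constructs messages and a request object locally; it does not call the API.

from google.cloud import datacatalog_v1

# Entry describing a Vertex AI model, using the new ModelSpec /
# VertexModelSpec / VertexModelSourceInfo messages.
model_entry = datacatalog_v1.Entry()
model_entry.type_ = "MODEL"
model_entry.integrated_system = "VERTEX_AI"
model_entry.model_spec = datacatalog_v1.ModelSpec(
    vertex_model_spec=datacatalog_v1.VertexModelSpec(
        version_id="1",  # placeholder
        version_aliases=["default"],  # placeholder
        version_description="Example model version",  # placeholder
        container_image_uri="gcr.io/example-project/serving:latest",  # placeholder
        vertex_model_source_info=datacatalog_v1.VertexModelSourceInfo(
            source_type=datacatalog_v1.VertexModelSourceInfo.ModelSourceType.CUSTOM,
        ),
    )
)

# Entry describing a Vertex AI dataset, using the new DatasetSpec /
# VertexDatasetSpec messages.
dataset_entry = datacatalog_v1.Entry()
dataset_entry.type_ = "DATASET"
dataset_entry.integrated_system = "VERTEX_AI"
dataset_entry.dataset_spec = datacatalog_v1.DatasetSpec(
    vertex_dataset_spec=datacatalog_v1.VertexDatasetSpec(
        data_item_count=10000,  # placeholder
        data_type=datacatalog_v1.VertexDatasetSpec.DataType.TABLE,
    )
)

# Entries can then be wrapped in a CreateEntryRequest, mirroring the generated
# create_entry samples above (parent and entry_id are placeholders).
request = datacatalog_v1.CreateEntryRequest(
    parent="projects/my-project/locations/us-central1/entryGroups/my_entry_group",
    entry_id="vertex_model_entry",
    entry=model_entry,
)
print(request)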