From 51947fb5913a3859ee45cd66a0dbad51a84cebe9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Sep 2024 08:07:56 -0400 Subject: [PATCH] feat: [google-cloud-discoveryengine] promote search tuning service to v1 (#13049) - [ ] Regenerate this pull request now. BEGIN_COMMIT_OVERRIDE feat(v1): promote search tuning service to v1 feat(v1): promot user event purge to v1 feat(v1): return structured document info in answers feat(v1): return index status in document feat(v1): support batch documents purge with GCS input feat(v1): support batch get documents metadata by uri patterns feat(v1): return joined status in user event docs(v1): keep the API doc up-to-date with recent changes feat(v1beta): support natural language understanding in search feat(v1beta): allow set relevance threshold on search feat(v1beta): support one box search feat(v1beta): return structured document info in answers feat(v1beta): return index status in document feat(v1beta): support batch documents purge with GCS input feat(v1beta): support batch get documents metadata by uri patterns feat(v1beta): return joined status in user event docs(v1beta): keep the API doc up-to-date with recent changes feat(v1alpha): return index status in document feat(v1alpha): support creating workspace search data stores feat(v1alpha): support batch get documents metadata by uri patterns feat(v1alpha): return query segment in NL query understanding feat(v1alpha): return joined status in user event docs(v1alpha): keep the API doc up-to-date with recent changes END_COMMIT_OVERRIDE PiperOrigin-RevId: 670771871 Source-Link: https://github.com/googleapis/googleapis/commit/5314818275923044fb5af690c5ad85c3428d0842 Source-Link: https://github.com/googleapis/googleapis-gen/commit/85cebbbe5a100fa106d93dac34dacbf19a3a6a48 Copy-Tag: 
eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiODVjZWJiYmU1YTEwMGZhMTA2ZDkzZGFjMzRkYWNiZjE5YTNhNmE0OCJ9 BEGIN_NESTED_COMMIT feat: [google-cloud-discoveryengine] support natural language understanding in search feat: allow set relevance threshold on search feat: support one box search feat: return structured document info in answers feat: return index status in document feat: support batch documents purge with GCS input feat: support batch get documents metadata by uri patterns feat: return joined status in user event docs: keep the API doc up-to-date with recent changes PiperOrigin-RevId: 670771759 Source-Link: https://github.com/googleapis/googleapis/commit/6cb0a970d16964a22626a61d9f7a1cc879f0fc7a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ad7bcde915dc55058df32077a44de7a5cf5cb1fc Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiYWQ3YmNkZTkxNWRjNTUwNThkZjMyMDc3YTQ0ZGU3YTVjZjVjYjFmYyJ9 END_NESTED_COMMIT BEGIN_NESTED_COMMIT feat: [google-cloud-discoveryengine] return index status in document feat: support creating workspace search data stores feat: support batch get documents metadata by uri patterns feat: return query segment in NL query understanding feat: return joined status in user event docs: keep the API doc up-to-date with recent changes PiperOrigin-RevId: 670770678 Source-Link: https://github.com/googleapis/googleapis/commit/ce9d18865ea37d50d772665c36949723afe91ddf Source-Link: https://github.com/googleapis/googleapis-gen/commit/a0866c4cd0b20e0ab502590b5228c197e42dc17e Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRpc2NvdmVyeWVuZ2luZS8uT3dsQm90LnlhbWwiLCJoIjoiYTA4NjZjNGNkMGIyMGUwYWI1MDI1OTBiNTIyOGMxOTdlNDJkYzE3ZSJ9 END_NESTED_COMMIT --------- Co-authored-by: Owl Bot --- .../search_tuning_service.rst | 6 + .../docs/discoveryengine_v1/services_.rst | 1 + .../google/cloud/discoveryengine/__init__.py | 6 + 
.../cloud/discoveryengine_v1/__init__.py | 32 + .../discoveryengine_v1/gapic_metadata.json | 79 + .../completion_service/transports/rest.py | 16 + .../services/control_service/async_client.py | 15 +- .../services/control_service/client.py | 15 +- .../control_service/transports/rest.py | 29 +- .../transports/rest.py | 8 + .../data_store_service/transports/rest.py | 16 + .../services/document_service/async_client.py | 116 + .../services/document_service/client.py | 115 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 159 + .../engine_service/transports/rest.py | 16 + .../transports/rest.py | 8 + .../project_service/transports/rest.py | 16 + .../services/rank_service/transports/rest.py | 8 + .../recommendation_service/transports/rest.py | 8 + .../schema_service/transports/rest.py | 16 + .../search_service/transports/rest.py | 8 + .../search_tuning_service/__init__.py | 22 + .../search_tuning_service/async_client.py | 654 +++ .../services/search_tuning_service/client.py | 1097 +++++ .../transports/__init__.py | 38 + .../search_tuning_service/transports/base.py | 212 + .../search_tuning_service/transports/grpc.py | 372 ++ .../transports/grpc_asyncio.py | 390 ++ .../search_tuning_service/transports/rest.py | 1034 +++++ .../transports/rest.py | 16 + .../user_event_service/async_client.py | 105 + .../services/user_event_service/client.py | 103 + .../user_event_service/transports/base.py | 15 + .../user_event_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 38 + .../user_event_service/transports/rest.py | 157 + .../discoveryengine_v1/types/__init__.py | 26 + .../cloud/discoveryengine_v1/types/answer.py | 88 +- .../cloud/discoveryengine_v1/types/control.py | 13 +- .../types/conversational_search_service.py | 17 +- .../types/custom_tuning_model.py | 125 + .../types/data_store_service.py | 15 + .../discoveryengine_v1/types/document.py 
| 39 + .../types/document_processing_config.py | 2 + .../types/document_service.py | 172 + .../types/grounded_generation_service.py | 3 +- .../discoveryengine_v1/types/import_config.py | 41 +- .../discoveryengine_v1/types/purge_config.py | 198 + .../types/recommendation_service.py | 3 +- .../types/search_service.py | 63 +- .../types/search_tuning_service.py | 271 ++ .../discoveryengine_v1/types/user_event.py | 7 + .../cloud/discoveryengine_v1alpha/__init__.py | 7 +- .../gapic_metadata.json | 15 + .../acl_config_service/async_client.py | 2 +- .../services/acl_config_service/client.py | 2 +- .../acl_config_service/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../services/document_service/async_client.py | 112 + .../services/document_service/client.py | 111 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 143 + .../discoveryengine_v1alpha/types/__init__.py | 7 +- .../discoveryengine_v1alpha/types/answer.py | 7 + .../discoveryengine_v1alpha/types/control.py | 2 +- .../types/custom_tuning_model.py | 4 + .../types/data_store.py | 70 + .../discoveryengine_v1alpha/types/document.py | 39 + .../types/document_service.py | 162 + .../types/grounded_generation_service.py | 3 +- .../types/import_config.py | 32 +- .../types/search_service.py | 31 +- .../types/user_event.py | 7 + .../cloud/discoveryengine_v1beta/__init__.py | 6 + .../gapic_metadata.json | 15 + .../services/document_service/async_client.py | 116 + .../services/document_service/client.py | 115 + .../document_service/transports/base.py | 17 + .../document_service/transports/grpc.py | 31 + .../transports/grpc_asyncio.py | 36 + .../document_service/transports/rest.py | 143 + .../discoveryengine_v1beta/types/__init__.py | 6 + .../discoveryengine_v1beta/types/answer.py | 79 +- .../discoveryengine_v1beta/types/control.py | 2 +- .../types/conversational_search_service.py | 3 + 
.../types/custom_tuning_model.py | 4 + .../discoveryengine_v1beta/types/document.py | 39 + .../types/document_processing_config.py | 2 + .../types/document_service.py | 173 + .../types/grounded_generation_service.py | 3 +- .../types/import_config.py | 41 +- .../types/purge_config.py | 84 + .../types/search_service.py | 157 +- .../types/user_event.py | 7 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ..._document_service_purge_documents_async.py | 4 + ...d_document_service_purge_documents_sync.py | 4 + ...tuning_service_list_custom_models_async.py | 52 + ..._tuning_service_list_custom_models_sync.py | 52 + ...tuning_service_train_custom_model_async.py | 56 + ..._tuning_service_train_custom_model_sync.py | 56 + ...r_event_service_purge_user_events_async.py | 57 + ...er_event_service_purge_user_events_sync.py | 57 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ...vice_batch_get_documents_metadata_async.py | 52 + ...rvice_batch_get_documents_metadata_sync.py | 52 + ..._document_service_purge_documents_async.py | 4 + ...d_document_service_purge_documents_sync.py | 4 + ...adata_google.cloud.discoveryengine.v1.json | 790 +++- ..._google.cloud.discoveryengine.v1alpha.json | 161 + ...a_google.cloud.discoveryengine.v1beta.json | 189 +- .../fixup_discoveryengine_v1_keywords.py | 8 +- .../fixup_discoveryengine_v1alpha_keywords.py | 1 + .../fixup_discoveryengine_v1beta_keywords.py | 5 +- .../test_data_store_service.py | 2 + .../test_document_service.py | 2041 ++++++--- .../test_search_tuning_service.py | 3772 +++++++++++++++++ .../test_user_event_service.py | 568 +++ .../test_data_store_service.py | 8 + .../test_document_service.py | 2131 +++++++--- .../test_user_event_service.py | 1 + .../test_document_service.py | 2041 ++++++--- .../test_evaluation_service.py | 2 + .../test_serving_config_service.py | 1 + .../test_user_event_service.py | 1 + 132 files 
changed, 18228 insertions(+), 2219 deletions(-) create mode 100644 packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py create mode 100644 packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py create 
mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py create mode 100644 packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py create mode 100644 packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst new file mode 100644 index 000000000000..02b0fe7ef583 --- /dev/null +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/search_tuning_service.rst @@ -0,0 +1,6 @@ +SearchTuningService 
+------------------------------------- + +.. automodule:: google.cloud.discoveryengine_v1.services.search_tuning_service + :members: + :inherited-members: diff --git a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst index 6e0bbcda2e72..411ae24c92cc 100644 --- a/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst +++ b/packages/google-cloud-discoveryengine/docs/discoveryengine_v1/services_.rst @@ -15,5 +15,6 @@ Services for Google Cloud Discoveryengine v1 API recommendation_service schema_service search_service + search_tuning_service site_search_engine_service user_event_service diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py index 42959becdbfc..48c89d24d6f4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/__init__.py @@ -208,6 +208,8 @@ DocumentProcessingConfig, ) from google.cloud.discoveryengine_v1beta.types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -289,6 +291,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -501,6 +504,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -569,6 +574,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", 
"PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py index e06104fe1669..261c72ef4b22 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/__init__.py @@ -45,6 +45,10 @@ ) from .services.schema_service import SchemaServiceAsyncClient, SchemaServiceClient from .services.search_service import SearchServiceAsyncClient, SearchServiceClient +from .services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, +) from .services.site_search_engine_service import ( SiteSearchEngineServiceAsyncClient, SiteSearchEngineServiceClient, @@ -102,6 +106,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .types.custom_tuning_model import CustomTuningModel from .types.data_store import DataStore from .types.data_store_service import ( CreateDataStoreMetadata, @@ -116,6 +121,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -173,9 +180,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .types.rank_service import RankingRecord, RankRequest, RankResponse from .types.recommendation_service import RecommendRequest, RecommendResponse @@ -192,6 +203,13 @@ UpdateSchemaRequest, ) from .types.search_service import SearchRequest, 
SearchResponse +from .types.search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .types.session import Query, Session from .types.site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .types.site_search_engine_service import ( @@ -248,6 +266,7 @@ "RecommendationServiceAsyncClient", "SchemaServiceAsyncClient", "SearchServiceAsyncClient", + "SearchTuningServiceAsyncClient", "SiteSearchEngineServiceAsyncClient", "UserEventServiceAsyncClient", "AlloyDbSource", @@ -257,6 +276,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -296,6 +317,7 @@ "CreateTargetSiteMetadata", "CreateTargetSiteRequest", "CustomAttribute", + "CustomTuningModel", "DataStore", "DataStoreServiceClient", "DeleteControlRequest", @@ -359,6 +381,8 @@ "ListControlsResponse", "ListConversationsRequest", "ListConversationsResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", "ListDataStoresRequest", "ListDataStoresResponse", "ListDocumentsRequest", @@ -384,9 +408,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "Query", "RankRequest", "RankResponse", @@ -407,6 +435,7 @@ "SearchResponse", "SearchServiceClient", "SearchTier", + "SearchTuningServiceClient", "SearchUseCase", "Session", "SiteSearchEngine", @@ -417,6 +446,9 @@ "SuggestionDenyListEntry", "TargetSite", "TextInput", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + 
"TrainCustomModelResponse", "TransactionInfo", "UpdateControlRequest", "UpdateConversationRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json index 855e95c5aceb..219241f131af 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -983,6 +998,55 @@ } } }, + "SearchTuningService": { + "clients": { + "grpc": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SearchTuningServiceAsyncClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + }, + "rest": { + "libraryClient": "SearchTuningServiceClient", + "rpcs": { + "ListCustomModels": { + "methods": [ + "list_custom_models" + ] + }, + "TrainCustomModel": { + "methods": [ + "train_custom_model" + ] + } + } + } + } + }, "SiteSearchEngineService": { 
"clients": { "grpc": { @@ -1197,6 +1261,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1217,6 +1286,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" @@ -1237,6 +1311,11 @@ "import_user_events" ] }, + "PurgeUserEvents": { + "methods": [ + "purge_user_events" + ] + }, "WriteUserEvent": { "methods": [ "write_user_event" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index 836224789b61..d47335b10033 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -490,6 +490,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -552,6 +556,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1302,6 +1310,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1424,6 +1436,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py index 35b700126b56..377edbf159d7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/async_client.py @@ -358,8 +358,9 @@ async def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -590,8 +591,9 @@ async def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. 
""" @@ -702,8 +704,9 @@ async def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 2a900c13463d..19ac1f8332c8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -803,8 +803,9 @@ def sample_create_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1029,8 +1030,9 @@ def sample_update_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ @@ -1138,8 +1140,9 @@ def sample_get_control(): Returns: google.cloud.discoveryengine_v1.types.Control: Defines a conditioned behavior to employ during serving. 
- Must be attached to a [ServingConfig][] to be - considered at serving time. Permitted actions + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions dependent on SolutionType. """ diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py index dc8bd898b465..7d8ea550e2a5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py @@ -414,9 +414,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -601,9 +602,10 @@ def __call__( Returns: ~.control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. + Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -791,9 +793,10 @@ def __call__( Returns: ~.gcd_control.Control: Defines a conditioned behavior to employ during serving. - Must be attached to a [ServingConfig][] to be considered - at serving time. Permitted actions dependent on - ``SolutionType``. 
+ Must be attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + to be considered at serving time. Permitted actions + dependent on ``SolutionType``. """ @@ -1059,6 +1062,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1181,6 +1188,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index 84b6bc30c1a6..f1211380fe64 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -2213,6 +2213,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2335,6 +2339,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": 
"/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py index 65051ae919e2..caae22741d06 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -482,6 +482,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -544,6 +548,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1266,6 +1274,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1388,6 +1400,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py index 9cf6c715c029..2a549bd41562 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/async_client.py @@ -1065,7 +1065,11 @@ async def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1142,6 +1146,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 0de64afac5e6..7cd09e1391e9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -1519,7 +1519,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1594,6 +1598,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py index 4df6d3f36550..5c05090f3e92 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py index 7b5a3256af7f..688476d1b16f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc.py @@ -469,6 +469,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + 
[document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py index 4bd82f9fdb85..7667dceb4ddb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/grpc_asyncio.py @@ -484,6 +484,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + 
Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1.Document]s. Supported + for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -531,6 +562,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index 1418d3d2036b..51788a4c0f13 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. 
+ """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -532,6 +565,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -594,6 +631,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -621,6 +662,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1294,6 +1434,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, @@ -1508,6 +1659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1630,6 +1785,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py index 9f8887074424..5cdbcf388f23 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1224,6 +1232,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1346,6 +1358,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py index ee8a9194089c..ef84072de1d1 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py @@ -533,6 +533,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -655,6 +659,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py index b8a0f24db978..86fe2e880195 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py @@ -355,6 +355,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -417,6 +421,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -705,6 +713,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -827,6 +839,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py index 8796a94833df..c4725bfa7dfa 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py @@ -524,6 +524,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -646,6 +650,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py index 8b3979d914df..dac81936a0ee 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py @@ -538,6 +538,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -660,6 +664,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 214d34eaee49..56d8cdb52691 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -477,6 +477,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", 
"uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -539,6 +543,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -1243,6 +1251,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1365,6 +1377,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index c8165a22227b..6d55c0b7011d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -536,6 +536,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -658,6 +662,10 @@ def __call__( "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py new file mode 100644 index 000000000000..71fba7ca5f3f --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import SearchTuningServiceAsyncClient +from .client import SearchTuningServiceClient + +__all__ = ( + "SearchTuningServiceClient", + "SearchTuningServiceAsyncClient", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py new file mode 100644 index 000000000000..af413b4bedf6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/async_client.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + search_tuning_service, +) + +from .client import SearchTuningServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport + + +class SearchTuningServiceAsyncClient: + """Service for search tuning.""" + + _client: SearchTuningServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = SearchTuningServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SearchTuningServiceClient._DEFAULT_UNIVERSE + + custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.custom_tuning_model_path + ) + parse_custom_tuning_model_path = staticmethod( + SearchTuningServiceClient.parse_custom_tuning_model_path + ) + data_store_path = staticmethod(SearchTuningServiceClient.data_store_path) + parse_data_store_path = staticmethod( + SearchTuningServiceClient.parse_data_store_path + ) + common_billing_account_path = staticmethod( + SearchTuningServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SearchTuningServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SearchTuningServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SearchTuningServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + SearchTuningServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SearchTuningServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(SearchTuningServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SearchTuningServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(SearchTuningServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SearchTuningServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_info.__func__(SearchTuningServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceAsyncClient: The constructed client. + """ + return SearchTuningServiceClient.from_service_account_file.__func__(SearchTuningServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SearchTuningServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = SearchTuningServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SearchTuningServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SearchTuningServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.train_custom_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_custom_models + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "SearchTuningServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceAsyncClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py new file mode 100644 index 000000000000..97d62eb19d51 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py @@ -0,0 +1,1097 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.discoveryengine_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + search_tuning_service, +) + +from .transports.base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport +from .transports.grpc import SearchTuningServiceGrpcTransport +from .transports.grpc_asyncio import 
SearchTuningServiceGrpcAsyncIOTransport +from .transports.rest import SearchTuningServiceRestTransport + + +class SearchTuningServiceClientMeta(type): + """Metaclass for the SearchTuningService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[SearchTuningServiceTransport]] + _transport_registry["grpc"] = SearchTuningServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SearchTuningServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SearchTuningServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SearchTuningServiceClient(metaclass=SearchTuningServiceClientMeta): + """Service for search tuning.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "discoveryengine.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "discoveryengine.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SearchTuningServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SearchTuningServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SearchTuningServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def custom_tuning_model_path( + project: str, + location: str, + data_store: str, + custom_tuning_model: str, + ) -> str: + """Returns a fully-qualified custom_tuning_model string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + + @staticmethod + def parse_custom_tuning_model_path(path: str) -> Dict[str, str]: + """Parses a custom_tuning_model path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)/customTuningModels/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_store_path( + project: str, + location: str, + data_store: str, + ) -> str: + """Returns a fully-qualified data_store string.""" + return "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + + @staticmethod + def parse_data_store_path(path: str) -> Dict[str, str]: + """Parses a data_store path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataStores/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + 
billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m 
else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SearchTuningServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SearchTuningServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + SearchTuningServiceTransport, + Callable[..., SearchTuningServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the search tuning service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SearchTuningServiceTransport,Callable[..., SearchTuningServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SearchTuningServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SearchTuningServiceClient._read_environment_variables() + self._client_cert_source = SearchTuningServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SearchTuningServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SearchTuningServiceTransport) + if transport_provided: + # transport is a SearchTuningServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SearchTuningServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SearchTuningServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SearchTuningServiceTransport], + Callable[..., SearchTuningServiceTransport], + ] = ( + SearchTuningServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SearchTuningServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + 
scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def train_custom_model( + self, + request: Optional[ + Union[search_tuning_service.TrainCustomModelRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Trains a custom model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.TrainCustomModelRequest, dict]): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.TrainCustomModelResponse` Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.TrainCustomModelRequest): + request = search_tuning_service.TrainCustomModelRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.train_custom_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + search_tuning_service.TrainCustomModelResponse, + metadata_type=search_tuning_service.TrainCustomModelMetadata, + ) + + # Done; return the response. 
+ return response + + def list_custom_models( + self, + request: Optional[ + Union[search_tuning_service.ListCustomModelsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Gets a list of all the custom models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.ListCustomModelsRequest, dict]): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1.types.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, search_tuning_service.ListCustomModelsRequest): + request = search_tuning_service.ListCustomModelsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_custom_models] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_store", request.data_store),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SearchTuningServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("SearchTuningServiceClient",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py new file mode 100644 index 000000000000..8c56b6549cef --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SearchTuningServiceTransport +from .grpc import SearchTuningServiceGrpcTransport +from .grpc_asyncio import SearchTuningServiceGrpcAsyncIOTransport +from .rest import SearchTuningServiceRestInterceptor, SearchTuningServiceRestTransport + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[SearchTuningServiceTransport]] +_transport_registry["grpc"] = SearchTuningServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SearchTuningServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SearchTuningServiceRestTransport + +__all__ = ( + "SearchTuningServiceTransport", + "SearchTuningServiceGrpcTransport", + "SearchTuningServiceGrpcAsyncIOTransport", + "SearchTuningServiceRestTransport", + "SearchTuningServiceRestInterceptor", +) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py new file mode 100644 index 000000000000..cd575fdaf62d --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/base.py @@ -0,0 +1,212 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.discoveryengine_v1 import gapic_version as package_version
from google.cloud.discoveryengine_v1.types import search_tuning_service

# Default client metadata (user-agent) stamped with this package's version.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class SearchTuningServiceTransport(abc.ABC):
    """Abstract transport class for SearchTuningService.

    Concrete subclasses (gRPC, gRPC AsyncIO, REST) implement the RPC
    properties declared below; this base class only resolves credentials
    and the endpoint host.
    """

    # OAuth scopes requested when falling back to Application Default
    # Credentials.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "discoveryengine.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses set ``_ignore_credentials`` to True *before* calling this
        # initializer when an explicit channel was supplied; default to
        # resolving credentials normally.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            # Fall back to Application Default Credentials.
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        """Return the resolved ``host:port`` endpoint for this transport."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods (adds retry/timeout metadata around
        # each RPC-returning property).
        self._wrapped_methods = {
            self.train_custom_model: gapic_v1.method.wrap_method(
                self.train_custom_model,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_custom_models: gapic_v1.method.wrap_method(
                self.list_custom_models,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # The properties below declare the service's RPC surface; concrete
    # transports return a callable for each, built on their wire protocol.
    @property
    def train_custom_model(
        self,
    ) -> Callable[
        [search_tuning_service.TrainCustomModelRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def list_custom_models(
        self,
    ) -> Callable[
        [search_tuning_service.ListCustomModelsRequest],
        Union[
            search_tuning_service.ListCustomModelsResponse,
            Awaitable[search_tuning_service.ListCustomModelsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[
            operations_pb2.ListOperationsResponse,
            Awaitable[operations_pb2.ListOperationsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None,]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        """Return the transport's short name (e.g. ``"grpc"``, ``"rest"``)."""
        raise NotImplementedError()


__all__ = ("SearchTuningServiceTransport",)
# use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, grpc_helpers, operations_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore

from google.cloud.discoveryengine_v1.types import search_tuning_service

from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport


class SearchTuningServiceGrpcTransport(SearchTuningServiceTransport):
    """gRPC backend transport for SearchTuningService.

    Service for search tuning.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Cache of lazily created stub callables, keyed by RPC name.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if a ``channel`` instance is provided.
            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, grpc.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    # Unlimited message sizes in both directions.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service."""
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)

        # Return the client from cache.
        return self._operations_client

    @property
    def train_custom_model(
        self,
    ) -> Callable[
        [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation
    ]:
        r"""Return a callable for the train custom model method over gRPC.

        Trains a custom model.

        Returns:
            Callable[[~.TrainCustomModelRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "train_custom_model" not in self._stubs:
            self._stubs["train_custom_model"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel",
                request_serializer=search_tuning_service.TrainCustomModelRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["train_custom_model"]

    @property
    def list_custom_models(
        self,
    ) -> Callable[
        [search_tuning_service.ListCustomModelsRequest],
        search_tuning_service.ListCustomModelsResponse,
    ]:
        r"""Return a callable for the list custom models method over gRPC.

        Gets a list of all the custom models.

        Returns:
            Callable[[~.ListCustomModelsRequest],
                    ~.ListCustomModelsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_custom_models" not in self._stubs:
            self._stubs["list_custom_models"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels",
                request_serializer=search_tuning_service.ListCustomModelsRequest.serialize,
                response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize,
            )
        return self._stubs["list_custom_models"]

    def close(self):
        """Close the underlying gRPC channel."""
        self.grpc_channel.close()

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]

    @property
    def kind(self) -> str:
        """Return the short transport name used in the transport registry."""
        return "grpc"


__all__ = ("SearchTuningServiceGrpcTransport",)
#
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, grpc_helpers_async, operations_v1
from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.cloud.discoveryengine_v1.types import search_tuning_service

from .base import DEFAULT_CLIENT_INFO, SearchTuningServiceTransport
from .grpc import SearchTuningServiceGrpcTransport


class SearchTuningServiceGrpcAsyncIOTransport(SearchTuningServiceTransport):
    """gRPC AsyncIO backend transport for SearchTuningService.

    Service for search tuning.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # The AsyncIO channel this transport communicates over.
    _grpc_channel: aio.Channel
    # Cache of lazily created stub callables, keyed by RPC name.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(
        cls,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    def __init__(
        self,
        *,
        host: str = "discoveryengine.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
        api_mtls_endpoint: Optional[str] = None,
        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'discoveryengine.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if a ``channel`` instance is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if a ``channel`` instance is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
                A ``Channel`` instance through which to make calls, or a Callable
                that constructs and returns one. If set to None, ``self.create_channel``
                is used to create the channel. If a Callable is given, it will be called
                with the same arguments as used in ``self.create_channel``.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if a ``channel`` instance is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if isinstance(channel, aio.Channel):
            # Ignore credentials if a channel was passed.
            credentials = None
            self._ignore_credentials = True
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )

        if not self._grpc_channel:
            # initialize with the provided callable or the default channel
            channel_init = channel or type(self).create_channel
            self._grpc_channel = channel_init(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    # Unlimited message sizes in both directions.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsAsyncClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsAsyncClient(
                self.grpc_channel
            )

        # Return the client from cache.
        return self._operations_client

    @property
    def train_custom_model(
        self,
    ) -> Callable[
        [search_tuning_service.TrainCustomModelRequest],
        Awaitable[operations_pb2.Operation],
    ]:
        r"""Return a callable for the train custom model method over gRPC.

        Trains a custom model.

        Returns:
            Callable[[~.TrainCustomModelRequest],
                    Awaitable[~.Operation]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "train_custom_model" not in self._stubs:
            self._stubs["train_custom_model"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.SearchTuningService/TrainCustomModel",
                request_serializer=search_tuning_service.TrainCustomModelRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["train_custom_model"]

    @property
    def list_custom_models(
        self,
    ) -> Callable[
        [search_tuning_service.ListCustomModelsRequest],
        Awaitable[search_tuning_service.ListCustomModelsResponse],
    ]:
        r"""Return a callable for the list custom models method over gRPC.

        Gets a list of all the custom models.

        Returns:
            Callable[[~.ListCustomModelsRequest],
                    Awaitable[~.ListCustomModelsResponse]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_custom_models" not in self._stubs:
            self._stubs["list_custom_models"] = self.grpc_channel.unary_unary(
                "/google.cloud.discoveryengine.v1.SearchTuningService/ListCustomModels",
                request_serializer=search_tuning_service.ListCustomModelsRequest.serialize,
                response_deserializer=search_tuning_service.ListCustomModelsResponse.deserialize,
            )
        return self._stubs["list_custom_models"]

    def _prep_wrapped_messages(self, client_info):
        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
        self._wrapped_methods = {
            self.train_custom_model: gapic_v1.method_async.wrap_method(
                self.train_custom_model,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_custom_models: gapic_v1.method_async.wrap_method(
                self.list_custom_models,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Close the AsyncIO channel; returns the result of ``grpc_channel.close()``."""
        return self.grpc_channel.close()

    @property
    def cancel_operation(
        self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
        r"""Return a callable for the cancel_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "cancel_operation" not in self._stubs:
            self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/CancelOperation",
                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
                response_deserializer=None,
            )
        return self._stubs["cancel_operation"]

    @property
    def get_operation(
        self,
    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
        r"""Return a callable for the get_operation method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_operation" not in self._stubs:
            self._stubs["get_operation"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/GetOperation",
                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["get_operation"]

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
    ]:
        r"""Return a callable for the list_operations method over gRPC."""
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_operations" not in self._stubs:
            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
                "/google.longrunning.Operations/ListOperations",
                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
            )
        return self._stubs["list_operations"]


__all__ = ("SearchTuningServiceGrpcAsyncIOTransport",)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import ( + gapic_v1, + operations_v1, + path_template, + rest_helpers, + rest_streaming, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.discoveryengine_v1.types import search_tuning_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import SearchTuningServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SearchTuningServiceRestInterceptor: + """Interceptor for SearchTuningService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SearchTuningServiceRestTransport. + + .. code-block:: python + class MyCustomSearchTuningServiceInterceptor(SearchTuningServiceRestInterceptor): + def pre_list_custom_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_custom_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_train_custom_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_train_custom_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SearchTuningServiceRestTransport(interceptor=MyCustomSearchTuningServiceInterceptor()) + client = SearchTuningServiceClient(transport=transport) + + + """ + + def pre_list_custom_models( + self, + request: search_tuning_service.ListCustomModelsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_custom_models( + self, response: search_tuning_service.ListCustomModelsResponse + ) -> search_tuning_service.ListCustomModelsResponse: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. 
+ """ + return response + + def pre_train_custom_model( + self, + request: search_tuning_service.TrainCustomModelRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + search_tuning_service.TrainCustomModelRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_train_custom_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the SearchTuningService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the SearchTuningService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SearchTuningServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SearchTuningServiceRestInterceptor + + +class SearchTuningServiceRestTransport(SearchTuningServiceTransport): + """REST backend transport for SearchTuningService. + + Service for search tuning. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "discoveryengine.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SearchTuningServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'discoveryengine.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SearchTuningServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ListCustomModels(SearchTuningServiceRestStub): + def __hash__(self): + return hash("ListCustomModels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.ListCustomModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> search_tuning_service.ListCustomModelsResponse: + r"""Call the list custom models method over HTTP. + + Args: + request (~.search_tuning_service.ListCustomModelsRequest): + The request object. Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.search_tuning_service.ListCustomModelsResponse: + Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}/customModels", + }, + ] + request, metadata = self._interceptor.pre_list_custom_models( + request, metadata + ) + pb_request = search_tuning_service.ListCustomModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = search_tuning_service.ListCustomModelsResponse() + pb_resp = search_tuning_service.ListCustomModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_custom_models(resp) + return resp + + class _TrainCustomModel(SearchTuningServiceRestStub): + def __hash__(self): + return hash("TrainCustomModel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: search_tuning_service.TrainCustomModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the train custom model method over HTTP. + + Args: + request (~.search_tuning_service.TrainCustomModelRequest): + The request object. Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{data_store=projects/*/locations/*/collections/*/dataStores/*}:trainCustomModel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_train_custom_model( + request, metadata + ) + pb_request = search_tuning_service.TrainCustomModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_train_custom_model(resp) + return resp + + @property + def list_custom_models( + self, + ) -> Callable[ + [search_tuning_service.ListCustomModelsRequest], + search_tuning_service.ListCustomModelsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListCustomModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def train_custom_model( + self, + ) -> Callable[ + [search_tuning_service.TrainCustomModelRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TrainCustomModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.dumps(transcoded_request["body"]) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. 
+ + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/operations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(SearchTuningServiceRestStub): + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataConnector}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*/engines/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/collections/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/branches/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*/models/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=projects/*}/operations", + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SearchTuningServiceRestTransport",) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py index fc45da7663ef..319dd0689231 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -727,6 +727,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": 
"/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -789,6 +793,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -2299,6 +2307,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -2421,6 +2433,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py index 20dbd3c1b872..320432527156 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/async_client.py @@ -53,6 +53,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -497,6 +498,110 @@ async def sample_collect_user_event(): # Done; return the 
response. return response + async def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.purge_user_events + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. 
+ return response + async def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index e2936a08eb22..c881e7747a63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -59,6 +59,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -963,6 +964,108 @@ def sample_collect_user_event(): # Done; return the response. return response + def purge_user_events( + self, + request: Optional[Union[purge_config.PurgeUserEventsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1 + + def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest, dict]): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.discoveryengine_v1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is + successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, purge_config.PurgeUserEventsRequest): + request = purge_config.PurgeUserEventsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.purge_user_events] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + purge_config.PurgeUserEventsResponse, + metadata_type=purge_config.PurgeUserEventsMetadata, + ) + + # Done; return the response. + return response + def import_user_events( self, request: Optional[Union[import_config.ImportUserEventsRequest, dict]] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py index db533bd228dd..bb46c14805c3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/base.py @@ -30,6 +30,7 @@ from google.cloud.discoveryengine_v1 import gapic_version as package_version from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -145,6 +146,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method.wrap_method( self.import_user_events, default_retry=retries.Retry( @@ -193,6 +199,15 @@ def 
collect_user_event( ]: raise NotImplementedError() + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py index 29ea6149acb1..21e5a071cbb7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -315,6 +316,36 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py index 030bc370d024..06c828c190ac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/grpc_asyncio.py @@ -29,6 +29,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -325,6 +326,38 @@ def collect_user_event( ) return self._stubs["collect_user_event"] + @property + def purge_user_events( + self, + ) -> Callable[ + [purge_config.PurgeUserEventsRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the purge user events method over gRPC. + + Deletes permanently all user events specified by the + filter provided. Depending on the number of events + specified by the filter, this operation could take hours + or days to complete. To test a filter, use the list + command first. + + Returns: + Callable[[~.PurgeUserEventsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "purge_user_events" not in self._stubs: + self._stubs["purge_user_events"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1.UserEventService/PurgeUserEvents", + request_serializer=purge_config.PurgeUserEventsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["purge_user_events"] + @property def import_user_events( self, @@ -373,6 +406,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.purge_user_events: gapic_v1.method_async.wrap_method( + self.purge_user_events, + default_timeout=None, + client_info=client_info, + ), self.import_user_events: gapic_v1.method_async.wrap_method( self.import_user_events, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index 2f87a0777f58..95798800bf12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -48,6 +48,7 @@ from google.cloud.discoveryengine_v1.types import ( import_config, + purge_config, user_event, user_event_service, ) @@ -93,6 +94,14 @@ def post_import_user_events(self, response): logging.log(f"Received response: {response}") return response + def pre_purge_user_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_purge_user_events(self, response): + logging.log(f"Received response: {response}") + return response + def pre_write_user_event(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -153,6 +162,29 @@ def post_import_user_events( """ 
return response + def pre_purge_user_events( + self, + request: purge_config.PurgeUserEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[purge_config.PurgeUserEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the UserEventService server. + """ + return request, metadata + + def post_purge_user_events( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to manipulate the response + after it is returned by the UserEventService server but before + it is returned to user code. + """ + return response + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -422,6 +454,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -484,6 +520,10 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", @@ -759,6 +799,107 @@ def __call__( resp = self._interceptor.post_import_user_events(resp) return resp + class _PurgeUserEvents(UserEventServiceRestStub): + def __hash__(self): + return hash("PurgeUserEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in 
message_dict + } + + def __call__( + self, + request: purge_config.PurgeUserEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the purge user events method over HTTP. + + Args: + request (~.purge_config.PurgeUserEventsRequest): + The request object. Request message for PurgeUserEvents + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/collections/*/dataStores/*}/userEvents:purge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_purge_user_events( + request, metadata + ) + pb_request = purge_config.PurgeUserEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, 
method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_purge_user_events(resp) + return resp + class _WriteUserEvent(UserEventServiceRestStub): def __hash__(self): return hash("WriteUserEvent") @@ -884,6 +1025,14 @@ def import_user_events( # In C++ this would require a dynamic_cast return self._ImportUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property + def purge_user_events( + self, + ) -> Callable[[purge_config.PurgeUserEventsRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._PurgeUserEvents(self._session, self._host, self._interceptor) # type: ignore + @property def write_user_event( self, @@ -1048,6 +1197,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*/operations/*}", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}", @@ -1170,6 +1323,10 @@ def __call__( "method": "get", "uri": "/v1/{name=projects/*/locations/*/dataStores/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/identity_mapping_stores/*}/operations", + }, { "method": "get", "uri": "/v1/{name=projects/*/locations/*}/operations", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py index 79d6e4f7f3e3..8804192f3d63 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/__init__.py @@ -62,6 +62,7 @@ UpdateConversationRequest, UpdateSessionRequest, ) +from .custom_tuning_model import CustomTuningModel from .data_store import DataStore from .data_store_service import ( CreateDataStoreMetadata, @@ -76,6 +77,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -133,9 +136,13 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, + PurgeUserEventsMetadata, + 
PurgeUserEventsRequest, + PurgeUserEventsResponse, ) from .rank_service import RankingRecord, RankRequest, RankResponse from .recommendation_service import RecommendRequest, RecommendResponse @@ -152,6 +159,13 @@ UpdateSchemaRequest, ) from .search_service import SearchRequest, SearchResponse +from .search_tuning_service import ( + ListCustomModelsRequest, + ListCustomModelsResponse, + TrainCustomModelMetadata, + TrainCustomModelRequest, + TrainCustomModelResponse, +) from .session import Query, Session from .site_search_engine import SiteSearchEngine, SiteVerificationInfo, TargetSite from .site_search_engine_service import ( @@ -240,6 +254,7 @@ "ListSessionsResponse", "UpdateConversationRequest", "UpdateSessionRequest", + "CustomTuningModel", "DataStore", "CreateDataStoreMetadata", "CreateDataStoreRequest", @@ -251,6 +266,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -302,9 +319,13 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", + "PurgeUserEventsMetadata", + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", "RankingRecord", "RankRequest", "RankResponse", @@ -322,6 +343,11 @@ "UpdateSchemaRequest", "SearchRequest", "SearchResponse", + "ListCustomModelsRequest", + "ListCustomModelsResponse", + "TrainCustomModelMetadata", + "TrainCustomModelRequest", + "TrainCustomModelResponse", "Query", "Session", "SiteSearchEngine", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py index 8615cafa87d4..290bf922e3c3 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. @@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). 
This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. document_metadata (google.cloud.discoveryengine_v1.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. + + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. 
@@ -417,11 +484,10 @@ class SearchResult(proto.Message): If citation_type is CHUNK_LEVEL_CITATION and chunk mode is on, populate chunk info. struct_data (google.protobuf.struct_pb2.Struct): - Data representation. The structured JSON data for the - document. It's populated from the struct data from the - Document (code pointer: http://shortn/_objzAfIiHq), or the - Chunk in search result (code pointer: - http://shortn/_Ipo6KFFGBL). + Data representation. + The structured JSON data for the document. + It's populated from the struct data from the + Document, or the Chunk in search result. """ class SnippetInfo(proto.Message): @@ -455,7 +521,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -573,10 +644,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py index db871cdc66b1..2e7748c91522 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/control.py @@ -111,8 +111,10 @@ class TimeRange(proto.Message): class Control(proto.Message): r"""Defines a conditioned behavior to employ during serving. Must be - attached to a [ServingConfig][] to be considered at serving time. - Permitted actions dependent on ``SolutionType``. + attached to a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] to be + considered at serving time. Permitted actions dependent on + ``SolutionType``. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -150,9 +152,10 @@ class Control(proto.Message): 128 characters. Otherwise an INVALID ARGUMENT error is thrown. associated_serving_config_ids (MutableSequence[str]): - Output only. List of all [ServingConfig][] ids this control - is attached to. May take up to 10 minutes to update after - changes. + Output only. List of all + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig] + IDs this control is attached to. May take up to 10 minutes + to update after changes. solution_type (google.cloud.discoveryengine_v1.types.SolutionType): Required. Immutable. What solution the control belongs to. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py index 61b087c0b75e..b979c3de69bf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/conversational_search_service.py @@ -654,14 +654,8 @@ class SearchParams(proto.Message): returned. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. See `parse and - chunk - documents `__ + result mode defaults to ``DOCUMENTS``. See `parse and chunk + documents `__ data_store_specs (MutableSequence[google.cloud.discoveryengine_v1.types.SearchRequest.DataStoreSpec]): Specs defining dataStores to filter on in a search call and configurations for those @@ -924,10 +918,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. """ TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" @@ -1031,8 +1028,8 @@ class AnswerQueryResponse(proto.Message): Attributes: answer (google.cloud.discoveryengine_v1.types.Answer): Answer resource object. 
If - [AnswerQueryRequest.StepSpec.max_step_count][] is greater - than 1, use + [AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps][google.cloud.discoveryengine.v1.AnswerQueryRequest.QueryUnderstandingSpec.QueryRephraserSpec.max_rephrase_steps] + is greater than 1, use [Answer.name][google.cloud.discoveryengine.v1.Answer.name] to fetch answer information using [ConversationalSearchService.GetAnswer][google.cloud.discoveryengine.v1.ConversationalSearchService.GetAnswer] diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py new file mode 100644 index 000000000000..6b1a58131a45 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/custom_tuning_model.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "CustomTuningModel", + }, +) + + +class CustomTuningModel(proto.Message): + r"""Metadata that describes a custom tuned model. + + Attributes: + name (str): + Required. 
The fully qualified resource name of the model. + + Format: + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}`` + model must be an alpha-numerical string with limit of 40 + characters. + display_name (str): + The display name of the model. + model_version (int): + The version of the model. + model_state (google.cloud.discoveryengine_v1.types.CustomTuningModel.ModelState): + The state that the model is in (e.g.``TRAINING`` or + ``TRAINING_FAILED``). + create_time (google.protobuf.timestamp_pb2.Timestamp): + Deprecated: timestamp the Model was created + at. + training_start_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp the model training was initiated. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + """ + + class ModelState(proto.Enum): + r"""The state of the model. + + Values: + MODEL_STATE_UNSPECIFIED (0): + Default value. + TRAINING_PAUSED (1): + The model is in a paused training state. + TRAINING (2): + The model is currently training. + TRAINING_COMPLETE (3): + The model has successfully completed + training. + READY_FOR_SERVING (4): + The model is ready for serving. + TRAINING_FAILED (5): + The model training failed. + NO_IMPROVEMENT (6): + The model training finished successfully but + metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
+ """ + MODEL_STATE_UNSPECIFIED = 0 + TRAINING_PAUSED = 1 + TRAINING = 2 + TRAINING_COMPLETE = 3 + READY_FOR_SERVING = 4 + TRAINING_FAILED = 5 + NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + model_version: int = proto.Field( + proto.INT64, + number=3, + ) + model_state: ModelState = proto.Field( + proto.ENUM, + number=4, + enum=ModelState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + training_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py index 89f4d5d68ccc..65647f1a300a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/data_store_service.py @@ -67,6 +67,17 @@ class CreateDataStoreRequest(proto.Message): create an advanced data store for site search. If the data store is not configured as site search (GENERIC vertical and PUBLIC_WEBSITE content_config), this flag will be ignored. + skip_default_schema_creation (bool): + A boolean flag indicating whether to skip the default schema + creation for the data store. Only enable this flag if you + are certain that the default schema is incompatible with + your use case. + + If set to true, you must manually create a schema for the + data store before any documents can be ingested. 
+ + This flag cannot be specified if + ``data_store.starting_schema`` is specified. """ parent: str = proto.Field( @@ -86,6 +97,10 @@ class CreateDataStoreRequest(proto.Message): proto.BOOL, number=4, ) + skip_default_schema_creation: bool = proto.Field( + proto.BOOL, + number=7, + ) class GetDataStoreRequest(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py index 0d4d2ad7fa5b..b613e83e20a2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. 
If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py index 99d5a7597569..7f19cb717e94 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_processing_config.py @@ -61,6 +61,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. 
""" class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py index 7e8d2b5f8263..39388da42c72 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -268,4 +271,173 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s by exact uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. 
+ """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1.Document] is + not indexed. 
+ """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + state (google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1.Document]. + + This field is a member of `oneof`_ ``matcher_value``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py index 36de20d74df6..009d4e1badfd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py index bade57407d0a..7c238cb1b194 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/import_config.py @@ -121,10 +121,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -197,9 +197,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -420,9 +420,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. 
The instance ID of the Cloud @@ -470,6 +470,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -480,6 +485,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -487,9 +496,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -547,9 +556,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py index 6a4b091e5ba5..6633bc93f7a8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/purge_config.py @@ -21,9 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1", manifest={ + "PurgeUserEventsRequest", + "PurgeUserEventsResponse", + "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -37,24 +43,211 @@ ) +class PurgeUserEventsRequest(proto.Message): + r"""Request message for PurgeUserEvents method. + + Attributes: + parent (str): + Required. The resource name of the catalog under which the + events are created. The format is + ``projects/{project}/locations/global/collections/{collection}/dataStores/{dataStore}`` + filter (str): + Required. The filter string to specify the events to be + deleted with a length limit of 5,000 characters. The + eligible fields for filtering are: + + - ``eventType``: Double quoted + [UserEvent.event_type][google.cloud.discoveryengine.v1.UserEvent.event_type] + string. + - ``eventTime``: in ISO 8601 "zulu" format. + - ``userPseudoId``: Double quoted string. Specifying this + will delete all events associated with a visitor. + - ``userId``: Double quoted string. Specifying this will + delete all events associated with a user. 
+ + Examples: + + - Deleting all events in a time range: + ``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"`` + - Deleting specific eventType: ``eventType = "search"`` + - Deleting all events for a specific visitor: + ``userPseudoId = "visitor1024"`` + - Deleting all events inside a DataStore: ``*`` + + The filtering fields are assumed to have an implicit AND. + force (bool): + The ``force`` field is currently not supported. Purge user + event requests will permanently delete all purgeable events. + Once the development is complete: If ``force`` is set to + false, the method will return the expected purge count + without deleting any user events. This field will default to + false if not included in the request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class PurgeUserEventsResponse(proto.Message): + r"""Response of the PurgeUserEventsRequest. If the long running + operation is successfully done, then this message is returned by + the google.longrunning.Operations.response field. + + Attributes: + purge_count (int): + The total count of events purged as a result + of the operation. + """ + + purge_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class PurgeUserEventsMetadata(proto.Message): + r"""Metadata related to the progress of the PurgeUserEvents + operation. This will be returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. + success_count (int): + Count of entries that were deleted + successfully. + failure_count (int): + Count of entries that encountered errors + while processing. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + success_count: int = proto.Field( + proto.INT64, + number=3, + ) + failure_count: int = proto.Field( + proto.INT64, + number=4, + ) + + +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. 
+ + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). + error_config (google.cloud.discoveryengine_v1.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -63,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py index f225dff737b9..ae0cbbd72035 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/recommendation_service.py @@ -37,7 +37,8 @@ class RecommendRequest(proto.Message): Attributes: serving_config (str): - Required. Full resource name of a [ServingConfig][]: + Required. 
Full resource name of a + [ServingConfig][google.cloud.discoveryengine.v1.ServingConfig]: ``projects/*/locations/global/collections/*/engines/*/servingConfigs/*``, or ``projects/*/locations/global/collections/*/dataStores/*/servingConfigs/*`` diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py index 082241aaa2dd..4c31c629c97b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1.types.UserInfo): @@ -682,12 +685,7 @@ class ContentSearchSpec(proto.Message): be no extractive answer in the search response. search_result_mode (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SearchResultMode): Specifies the search result mode. If unspecified, the search - result mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] - is specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + result mode defaults to ``DOCUMENTS``. chunk_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.ChunkSpec): Specifies the chunk spec to be returned from the search response. 
Only available if the @@ -698,12 +696,7 @@ class ContentSearchSpec(proto.Message): class SearchResultMode(proto.Enum): r"""Specifies the search result mode. If unspecified, the search result - mode is based on - [DataStore.DocumentProcessingConfig.chunking_config][]: - - - If [DataStore.DocumentProcessingConfig.chunking_config][] is - specified, it defaults to ``CHUNKS``. - - Otherwise, it defaults to ``DOCUMENTS``. + mode defaults to ``DOCUMENTS``. Values: SEARCH_RESULT_MODE_UNSPECIFIED (0): @@ -815,6 +808,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. @@ -892,6 +893,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1275,7 +1280,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. 
If redirect action is triggered, no search is performed, and only @@ -1442,13 +1448,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. - Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1470,6 +1476,24 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if the LLM addon is not enabled. + NO_RELEVANT_CONTENT (6): + The no relevant content case. + + Google skips the summary if there is no relevant + content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. 
""" SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1477,6 +1501,9 @@ class SummarySkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 + NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py new file mode 100644 index 000000000000..d32623e52681 --- /dev/null +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/search_tuning_service.py @@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.discoveryengine_v1.types import custom_tuning_model, import_config + +__protobuf__ = proto.module( + package="google.cloud.discoveryengine.v1", + manifest={ + "ListCustomModelsRequest", + "ListCustomModelsResponse", + "TrainCustomModelRequest", + "TrainCustomModelResponse", + "TrainCustomModelMetadata", + }, +) + + +class ListCustomModelsRequest(proto.Message): + r"""Request message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + data_store (str): + Required. The resource name of the parent Data Store, such + as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to fetch + the models from. + """ + + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListCustomModelsResponse(proto.Message): + r"""Response message for + [SearchTuningService.ListCustomModels][google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels] + method. + + Attributes: + models (MutableSequence[google.cloud.discoveryengine_v1.types.CustomTuningModel]): + List of custom tuning models. + """ + + models: MutableSequence[ + custom_tuning_model.CustomTuningModel + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=custom_tuning_model.CustomTuningModel, + ) + + +class TrainCustomModelRequest(proto.Message): + r"""Request message for + [SearchTuningService.TrainCustomModel][google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel] + method. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_training_input (google.cloud.discoveryengine_v1.types.TrainCustomModelRequest.GcsTrainingInput): + Cloud Storage training input. + + This field is a member of `oneof`_ ``training_input``. + data_store (str): + Required. The resource name of the Data Store, such as + ``projects/*/locations/global/collections/default_collection/dataStores/default_data_store``. + This field is used to identify the data store where to train + the models. + model_type (str): + Model to be trained. Supported values are: + + - **search-tuning**: Fine tuning the search system based on + data provided. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + The desired location of errors incurred + during the data ingestion and training. + model_id (str): + If not provided, a UUID will be generated. + """ + + class GcsTrainingInput(proto.Message): + r"""Cloud Storage training data input. + + Attributes: + corpus_data_path (str): + The Cloud Storage corpus data which could be associated in + train data. The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. + + For search-tuning model, each line should have the \_id, + title and text. Example: + ``{"_id": "doc1", title: "relevant doc", "text": "relevant text"}`` + query_data_path (str): + The gcs query data which could be associated in train data. + The data path format is + ``gs:///``. A newline + delimited jsonl/ndjson file. + + For search-tuning model, each line should have the \_id and + text. Example: {"_id": "query1", "text": "example query"} + train_data_path (str): + Cloud Storage training data path whose format should be + ``gs:///``. The file should + be in tsv format. Each line should have the doc_id and + query_id and score (number). + + For search-tuning model, it should have the query-id + corpus-id score as tsv file header. 
The score should be a + number in ``[0, inf+)``. The larger the number is, the more + relevant the pair is. Example: + + - ``query-id\tcorpus-id\tscore`` + - ``query1\tdoc1\t1`` + test_data_path (str): + Cloud Storage test data. Same format as train_data_path. If + not provided, a random 80/20 train/test split will be + performed on train_data_path. + """ + + corpus_data_path: str = proto.Field( + proto.STRING, + number=1, + ) + query_data_path: str = proto.Field( + proto.STRING, + number=2, + ) + train_data_path: str = proto.Field( + proto.STRING, + number=3, + ) + test_data_path: str = proto.Field( + proto.STRING, + number=4, + ) + + gcs_training_input: GcsTrainingInput = proto.Field( + proto.MESSAGE, + number=2, + oneof="training_input", + message=GcsTrainingInput, + ) + data_store: str = proto.Field( + proto.STRING, + number=1, + ) + model_type: str = proto.Field( + proto.STRING, + number=3, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=4, + message=import_config.ImportErrorConfig, + ) + model_id: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelResponse(proto.Message): + r"""Response of the + [TrainCustomModelRequest][google.cloud.discoveryengine.v1.TrainCustomModelRequest]. + This message is returned by the + google.longrunning.Operations.response field. + + Attributes: + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while + processing the data. + error_config (google.cloud.discoveryengine_v1.types.ImportErrorConfig): + Echoes the destination for the complete + errors in the request if set. + model_status (str): + The trained model status. Possible values are: + + - **bad-data**: The training data quality is bad. + - **no-improvement**: Tuning didn't improve performance. + Won't deploy. + - **in-progress**: Model training job creation is in + progress. + - **training**: Model is actively training. 
+ - **evaluating**: The model is evaluating trained metrics. + - **indexing**: The model trained metrics are indexing. + - **ready**: The model is ready for serving. + metrics (MutableMapping[str, float]): + The metrics of the trained model. + model_name (str): + Fully qualified name of the + CustomTuningModel. + """ + + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + error_config: import_config.ImportErrorConfig = proto.Field( + proto.MESSAGE, + number=2, + message=import_config.ImportErrorConfig, + ) + model_status: str = proto.Field( + proto.STRING, + number=3, + ) + metrics: MutableMapping[str, float] = proto.MapField( + proto.STRING, + proto.DOUBLE, + number=4, + ) + model_name: str = proto.Field( + proto.STRING, + number=5, + ) + + +class TrainCustomModelMetadata(proto.Message): + r"""Metadata related to the progress of the TrainCustomModel + operation. This is returned by the + google.longrunning.Operation.metadata field. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Operation create time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Operation last update time. If the operation + is done, this is also the finish time. 
+ """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py index 450bc325a9de..65732fabdad7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/types/user_event.py @@ -671,6 +671,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -697,6 +700,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py index 1c4059675d8a..0c6552f8e52c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/__init__.py @@ -141,7 +141,7 @@ UpdateSessionRequest, ) from .types.custom_tuning_model import CustomTuningModel -from .types.data_store import DataStore, LanguageInfo +from .types.data_store import DataStore, LanguageInfo, WorkspaceConfig from .types.data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -157,6 +157,8 @@ from .types.document import Document, ProcessedDocument 
from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -378,6 +380,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -632,5 +636,6 @@ "UserEvent", "UserEventServiceClient", "UserInfo", + "WorkspaceConfig", "WriteUserEventRequest", ) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json index d1a982467642..ef4a01c7f9ab 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_metadata.json @@ -634,6 +634,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -679,6 +684,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -724,6 +734,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py index d27321c77e71..8d0e4ed5f010 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/async_client.py @@ -272,7 +272,7 @@ async def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index 93ac988cd074..21cb6595333e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -693,7 +693,7 @@ def update_acl_config( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> acl_config.AclConfig: - r"""Default Acl Configuration for use in a location of a + r"""Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py index 1c655ad8b840..89945a11af67 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc.py @@ -242,7 +242,7 @@ def update_acl_config( ) -> Callable[[acl_config_service.UpdateAclConfigRequest], acl_config.AclConfig]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py index c20e2dc0d50c..ca2d70a4a12f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/grpc_asyncio.py @@ -248,7 +248,7 @@ def update_acl_config( ]: r"""Return a callable for the update acl config method over gRPC. - Default Acl Configuration for use in a location of a + Default ACL configuration for use in a location of a customer's project. Updates will only reflect to new data stores. Existing data stores will still use the old value. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py index ceaa82413af3..c5be03187f84 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/async_client.py @@ -1274,6 +1274,118 @@ async def sample_get_processed_document(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index 4713addb0922..11e4241be964 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -1723,6 +1723,117 @@ def sample_get_processed_document(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1alpha + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py index 5acab8f79e13..6eb81a0622da 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/base.py @@ -186,6 +186,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -277,6 +282,18 @@ def get_processed_document( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py index 462ae0dae998..5a71389bf9b1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc.py @@ -503,6 +503,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def 
batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py index 46ea26ca0ae2..6a794841145b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/grpc_asyncio.py @@ -519,6 +519,37 @@ def get_processed_document( ) return self._stubs["get_processed_document"] + @property + def batch_get_documents_metadata( + self, + ) -> 
Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1alpha.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -571,6 +602,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index e3eca6321b0d..23d578101bc4 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -145,6 +153,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -655,6 +688,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1426,6 +1558,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py index 3f73bf8fcdbc..caa4c390030b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/__init__.py @@ -72,7 +72,7 @@ UpdateSessionRequest, ) from .custom_tuning_model import CustomTuningModel -from .data_store import DataStore, LanguageInfo +from .data_store import DataStore, LanguageInfo, WorkspaceConfig from .data_store_service import ( CreateDataStoreMetadata, CreateDataStoreRequest, @@ -88,6 +88,8 @@ from .document import Document, ProcessedDocument from .document_processing_config import DocumentProcessingConfig from .document_service import ( + 
BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -337,6 +339,7 @@ "CustomTuningModel", "DataStore", "LanguageInfo", + "WorkspaceConfig", "CreateDataStoreMetadata", "CreateDataStoreRequest", "DeleteDataStoreMetadata", @@ -350,6 +353,8 @@ "Document", "ProcessedDocument", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py index aea3764d85fb..ef0d79a41a53 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/answer.py @@ -111,6 +111,12 @@ class AnswerSkippedReason(proto.Enum): For example, "Reply in the tone of a competing company's CEO". Google skips the answer if the query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -119,6 +125,7 @@ class AnswerSkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py index 6f9d1b7ecb2c..c0972089ab3e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1alpha.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. solution_type (google.cloud.discoveryengine_v1alpha.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py index ab529ab2e91f..5381115d74c7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. 
""" MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py index 82b01be6e0e5..d5ef66749bbe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/data_store.py @@ -31,6 +31,7 @@ manifest={ "DataStore", "LanguageInfo", + "WorkspaceConfig", }, ) @@ -100,6 +101,12 @@ class DataStore(proto.Message): Currently ACL is only supported in ``GENERIC`` industry vertical with non-\ ``PUBLIC_WEBSITE`` content config. + workspace_config (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig): + Config to store data store type configuration for workspace + data. This must be set when + [DataStore.content_config][google.cloud.discoveryengine.v1alpha.DataStore.content_config] + is set as + [DataStore.ContentConfig.GOOGLE_WORKSPACE][google.cloud.discoveryengine.v1alpha.DataStore.ContentConfig.GOOGLE_WORKSPACE]. document_processing_config (google.cloud.discoveryengine_v1alpha.types.DocumentProcessingConfig): Configuration for Document understanding and enrichment. @@ -138,11 +145,16 @@ class ContentConfig(proto.Enum): PUBLIC_WEBSITE (3): The data store is used for public website search. + GOOGLE_WORKSPACE (4): + The data store is used for workspace search. Details of + workspace data store are specified in the + [WorkspaceConfig][google.cloud.discoveryengine.v1alpha.WorkspaceConfig]. 
""" CONTENT_CONFIG_UNSPECIFIED = 0 NO_CONTENT = 1 CONTENT_REQUIRED = 2 PUBLIC_WEBSITE = 3 + GOOGLE_WORKSPACE = 4 name: str = proto.Field( proto.STRING, @@ -190,6 +202,11 @@ class ContentConfig(proto.Enum): proto.BOOL, number=24, ) + workspace_config: "WorkspaceConfig" = proto.Field( + proto.MESSAGE, + number=25, + message="WorkspaceConfig", + ) document_processing_config: gcd_document_processing_config.DocumentProcessingConfig = proto.Field( proto.MESSAGE, number=27, @@ -240,4 +257,57 @@ class LanguageInfo(proto.Message): ) +class WorkspaceConfig(proto.Message): + r"""Config to store data store type configuration for workspace + data + + Attributes: + type_ (google.cloud.discoveryengine_v1alpha.types.WorkspaceConfig.Type): + The Google Workspace data source. + dasher_customer_id (str): + Obfuscated Dasher customer ID. + """ + + class Type(proto.Enum): + r"""Specifies the type of Workspace App supported by this + DataStore + + Values: + TYPE_UNSPECIFIED (0): + Defaults to an unspecified Workspace type. 
+ GOOGLE_DRIVE (1): + Workspace Data Store contains Drive data + GOOGLE_MAIL (2): + Workspace Data Store contains Mail data + GOOGLE_SITES (3): + Workspace Data Store contains Sites data + GOOGLE_CALENDAR (4): + Workspace Data Store contains Calendar data + GOOGLE_CHAT (5): + Workspace Data Store contains Chat data + GOOGLE_GROUPS (6): + Workspace Data Store contains Groups data + GOOGLE_KEEP (7): + Workspace Data Store contains Keep data + """ + TYPE_UNSPECIFIED = 0 + GOOGLE_DRIVE = 1 + GOOGLE_MAIL = 2 + GOOGLE_SITES = 3 + GOOGLE_CALENDAR = 4 + GOOGLE_CHAT = 5 + GOOGLE_GROUPS = 6 + GOOGLE_KEEP = 7 + + type_: Type = proto.Field( + proto.ENUM, + number=1, + enum=Type, + ) + dasher_customer_id: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py index 7792e6eb5a81..119f1b89d472 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import common @@ -96,6 +97,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. + index_status (google.cloud.discoveryengine_v1alpha.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. 
+ - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -211,6 +220,31 @@ class AccessRestriction(proto.Message): message="Document.AclInfo.AccessRestriction", ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -258,6 +292,11 @@ class AccessRestriction(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) class ProcessedDocument(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py index cee091f225bd..f1e063d05b9b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1alpha.types import document as gcd_document @@ -32,6 
+33,8 @@ "UpdateDocumentRequest", "DeleteDocumentRequest", "GetProcessedDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -349,4 +352,163 @@ class ProcessedDocumentFormat(proto.Enum): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + Currently supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. 
+ """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1alpha.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + indexed. + NOT_IN_TARGET_SITE (2): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1alpha.TargetSite]. + NOT_IN_INDEX (3): + The + [Document][google.cloud.discoveryengine.v1alpha.Document] is + not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + state (google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. 
+ last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1alpha.Document] + was last indexed. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1alpha.Document]. + + This field is a member of `oneof`_ ``matcher_value``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py index 40fa41fd061f..03ce6ee20fd3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. 
In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1alpha.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py index 35e5e74f7660..7052c1e8850f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. 
If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the Cloud @@ -504,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -564,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 00e46651dd81..f1af3c79f881 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -1605,7 +1605,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1alpha.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1876,6 +1877,18 @@ class SummarySkippedReason(proto.Enum): CEO". Only used when [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 (9): + The non-answer seeking query ignored case. + + Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_answer_seeking_query] + is set to ``true``. 
""" SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1885,6 +1898,8 @@ class SummarySkippedReason(proto.Enum): LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 + NON_SUMMARY_SEEKING_QUERY_IGNORED_V2 = 9 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2169,6 +2184,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2179,6 +2197,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2195,6 +2217,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2234,6 +2259,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py index 4deac76799fd..2162001f7b3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/user_event.py @@ -676,6 +676,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. """ id: str = proto.Field( @@ -702,6 +705,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py index 3bde65ad26b0..c469dab86a26 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/__init__.py @@ -139,6 +139,8 @@ from .types.document import Document from .types.document_processing_config import DocumentProcessingConfig from .types.document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -214,6 +216,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, PurgeSuggestionDenyListEntriesResponse, @@ -338,6 +341,8 @@ "BatchCreateTargetSiteMetadata", "BatchCreateTargetSitesRequest", "BatchCreateTargetSitesResponse", + 
"BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "BatchVerifyTargetSitesMetadata", "BatchVerifyTargetSitesRequest", "BatchVerifyTargetSitesResponse", @@ -497,6 +502,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json index 8afe7dc88cd2..02ac1aa251d2 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_metadata.json @@ -506,6 +506,11 @@ "grpc": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -546,6 +551,11 @@ "grpc-async": { "libraryClient": "DocumentServiceAsyncClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" @@ -586,6 +596,11 @@ "rest": { "libraryClient": "DocumentServiceClient", "rpcs": { + "BatchGetDocumentsMetadata": { + "methods": [ + "batch_get_documents_metadata" + ] + }, "CreateDocument": { "methods": [ "create_document" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py index 3cd99963bdee..a75f7fba7fec 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/async_client.py @@ -1069,7 +1069,11 @@ async def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1146,6 +1150,118 @@ async def sample_purge_documents(): # Done; return the response. return response + async def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (:class:`str`): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 4dbf8a833298..696861d2050d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -1523,7 +1523,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) @@ -1598,6 +1602,117 @@ def sample_purge_documents(): # Done; return the response. return response + def batch_get_documents_metadata( + self, + request: Optional[ + Union[document_service.BatchGetDocumentsMetadataRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import discoveryengine_v1beta + + def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest, dict]): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, document_service.BatchGetDocumentsMetadataRequest): + request = document_service.BatchGetDocumentsMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.batch_get_documents_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DocumentServiceClient": return self diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py index 6491783a2c52..8cb10eba46cf 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -263,6 +268,18 @@ def purge_documents( ]: raise NotImplementedError() + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + Union[ + document_service.BatchGetDocumentsMetadataResponse, + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py index 97a0a6923af9..5d1d8749c5d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc.py @@ -473,6 +473,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( 
+ self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + ~.BatchGetDocumentsMetadataResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py index d8dc444b89d1..cebef2f02dd5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/grpc_asyncio.py @@ -488,6 +488,37 @@ def purge_documents( ) return self._stubs["purge_documents"] + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + 
[document_service.BatchGetDocumentsMetadataRequest], + Awaitable[document_service.BatchGetDocumentsMetadataResponse], + ]: + r"""Return a callable for the batch get documents metadata method over gRPC. + + Gets index freshness metadata for + [Document][google.cloud.discoveryengine.v1beta.Document]s. + Supported for website search only. + + Returns: + Callable[[~.BatchGetDocumentsMetadataRequest], + Awaitable[~.BatchGetDocumentsMetadataResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_get_documents_metadata" not in self._stubs: + self._stubs["batch_get_documents_metadata"] = self.grpc_channel.unary_unary( + "/google.cloud.discoveryengine.v1beta.DocumentService/BatchGetDocumentsMetadata", + request_serializer=document_service.BatchGetDocumentsMetadataRequest.serialize, + response_deserializer=document_service.BatchGetDocumentsMetadataResponse.deserialize, + ) + return self._stubs["batch_get_documents_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -535,6 +566,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.batch_get_documents_metadata: gapic_v1.method_async.wrap_method( + self.batch_get_documents_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index db93f79c1cf2..2014752b11ae 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -79,6 +79,14 @@ class DocumentServiceRestInterceptor: .. code-block:: python class MyCustomDocumentServiceInterceptor(DocumentServiceRestInterceptor): + def pre_batch_get_documents_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_documents_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +145,31 @@ def post_update_document(self, response): """ + def pre_batch_get_documents_metadata( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the DocumentService server. + """ + return request, metadata + + def post_batch_get_documents_metadata( + self, response: document_service.BatchGetDocumentsMetadataResponse + ) -> document_service.BatchGetDocumentsMetadataResponse: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to manipulate the response + after it is returned by the DocumentService server but before + it is returned to user code. + """ + return response + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -616,6 +649,105 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client + class _BatchGetDocumentsMetadata(DocumentServiceRestStub): + def __hash__(self): + return hash("BatchGetDocumentsMetadata") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "matcher": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: document_service.BatchGetDocumentsMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> document_service.BatchGetDocumentsMetadataResponse: + r"""Call the batch get documents + metadata method over HTTP. + + Args: + request (~.document_service.BatchGetDocumentsMetadataRequest): + The request object. Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.document_service.BatchGetDocumentsMetadataResponse: + Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + { + "method": "get", + "uri": "/v1beta/{parent=projects/*/locations/*/collections/*/dataStores/*/branches/*}/batchGetDocumentsMetadata", + }, + ] + request, metadata = self._interceptor.pre_batch_get_documents_metadata( + request, metadata + ) + pb_request = document_service.BatchGetDocumentsMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = document_service.BatchGetDocumentsMetadataResponse() + pb_resp = document_service.BatchGetDocumentsMetadataResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_documents_metadata(resp) + return resp + class _CreateDocument(DocumentServiceRestStub): def __hash__(self): return hash("CreateDocument") @@ -1289,6 +1421,17 @@ def __call__( resp = self._interceptor.post_update_document(resp) return resp + @property + def batch_get_documents_metadata( + self, + ) -> Callable[ + [document_service.BatchGetDocumentsMetadataRequest], + document_service.BatchGetDocumentsMetadataResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetDocumentsMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def create_document( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py index 1392c0e5f3a6..cc7c07b55354 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/__init__.py @@ -79,6 +79,8 @@ from .document import Document from .document_processing_config import DocumentProcessingConfig from .document_service import ( + BatchGetDocumentsMetadataRequest, + BatchGetDocumentsMetadataResponse, CreateDocumentRequest, DeleteDocumentRequest, GetDocumentRequest, @@ -154,6 +156,7 @@ PurgeDocumentsMetadata, PurgeDocumentsRequest, PurgeDocumentsResponse, + PurgeErrorConfig, PurgeSuggestionDenyListEntriesMetadata, PurgeSuggestionDenyListEntriesRequest, 
PurgeSuggestionDenyListEntriesResponse, @@ -311,6 +314,8 @@ "UpdateDataStoreRequest", "Document", "DocumentProcessingConfig", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", "CreateDocumentRequest", "DeleteDocumentRequest", "GetDocumentRequest", @@ -379,6 +384,7 @@ "PurgeDocumentsMetadata", "PurgeDocumentsRequest", "PurgeDocumentsResponse", + "PurgeErrorConfig", "PurgeSuggestionDenyListEntriesMetadata", "PurgeSuggestionDenyListEntriesRequest", "PurgeSuggestionDenyListEntriesResponse", diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py index 57d56b1b142f..1347bf5c6e59 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/answer.py @@ -105,6 +105,18 @@ class AnswerSkippedReason(proto.Enum): Google skips the answer if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (6): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing + company's CEO". Google skips the answer if the + query is classified as a jail-breaking query. + CUSTOMER_POLICY_VIOLATION (7): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ ANSWER_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -112,6 +124,8 @@ class AnswerSkippedReason(proto.Enum): OUT_OF_DOMAIN_QUERY_IGNORED = 3 POTENTIAL_POLICY_VIOLATION = 4 NO_RELEVANT_CONTENT = 5 + JAIL_BREAKING_QUERY_IGNORED = 6 + CUSTOMER_POLICY_VIOLATION = 7 class Citation(proto.Message): r"""Citation info for a segment. 
@@ -171,6 +185,10 @@ class Reference(proto.Message): chunk_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo): Chunk information. + This field is a member of `oneof`_ ``content``. + structured_document_info (google.cloud.discoveryengine_v1beta.types.Answer.Reference.StructuredDocumentInfo): + Structured document information. + This field is a member of `oneof`_ ``content``. """ @@ -196,11 +214,22 @@ class UnstructuredDocumentInfo(proto.Message): class ChunkContent(proto.Message): r"""Chunk content. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: content (str): Chunk textual content. page_identifier (str): Page identifier. + relevance_score (float): + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. + + This field is a member of `oneof`_ ``_relevance_score``. """ content: str = proto.Field( @@ -211,6 +240,11 @@ class ChunkContent(proto.Message): proto.STRING, number=2, ) + relevance_score: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) document: str = proto.Field( proto.STRING, @@ -248,7 +282,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. 
document_metadata (google.cloud.discoveryengine_v1beta.types.Answer.Reference.ChunkInfo.DocumentMetadata): @@ -316,6 +355,26 @@ class DocumentMetadata(proto.Message): ) ) + class StructuredDocumentInfo(proto.Message): + r"""Structured search information. + + Attributes: + document (str): + Document resource name. + struct_data (google.protobuf.struct_pb2.Struct): + Structured search data. + """ + + document: str = proto.Field( + proto.STRING, + number=1, + ) + struct_data: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + unstructured_document_info: "Answer.Reference.UnstructuredDocumentInfo" = ( proto.Field( proto.MESSAGE, @@ -330,6 +389,14 @@ class DocumentMetadata(proto.Message): oneof="content", message="Answer.Reference.ChunkInfo", ) + structured_document_info: "Answer.Reference.StructuredDocumentInfo" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="content", + message="Answer.Reference.StructuredDocumentInfo", + ) + ) class Step(proto.Message): r"""Step information. @@ -456,7 +523,12 @@ class ChunkInfo(proto.Message): content (str): Chunk textual content. relevance_score (float): - Relevance score. + The relevance of the chunk for a given query. + Values range from 0.0 (completely irrelevant) to + 1.0 (completely relevant). This value is for + informational purpose only. It may change for + the same query and chunk at any time due to a + model retraining or change in implementation. This field is a member of `oneof`_ ``_relevance_score``. """ @@ -574,10 +646,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 type_: "Answer.QueryUnderstandingInfo.QueryClassificationInfo.Type" = ( proto.Field( diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py index 9fa4d299a928..f4a052314afd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/control.py @@ -154,7 +154,7 @@ class Control(proto.Message): associated_serving_config_ids (MutableSequence[str]): Output only. List of all [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] - ids this control is attached to. May take up to 10 minutes + IDs this control is attached to. May take up to 10 minutes to update after changes. solution_type (google.cloud.discoveryengine_v1beta.types.SolutionType): Required. Immutable. What solution the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py index 9ddc57efdf78..e09b08bc0e50 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/conversational_search_service.py @@ -924,10 +924,13 @@ class Type(proto.Enum): Adversarial query classification type. NON_ANSWER_SEEKING_QUERY (2): Non-answer-seeking query classification type. + JAIL_BREAKING_QUERY (3): + Jail-breaking query classification type. 
""" TYPE_UNSPECIFIED = 0 ADVERSARIAL_QUERY = 1 NON_ANSWER_SEEKING_QUERY = 2 + JAIL_BREAKING_QUERY = 3 types: MutableSequence[ "AnswerQueryRequest.QueryUnderstandingSpec.QueryClassificationSpec.Type" diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py index 257280080b65..d0f53427c220 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/custom_tuning_model.py @@ -74,6 +74,9 @@ class ModelState(proto.Enum): NO_IMPROVEMENT (6): The model training finished successfully but metrics did not improve. + INPUT_VALIDATION_FAILED (7): + Input data validation failed. Model training + didn't start. """ MODEL_STATE_UNSPECIFIED = 0 TRAINING_PAUSED = 1 @@ -82,6 +85,7 @@ class ModelState(proto.Enum): READY_FOR_SERVING = 4 TRAINING_FAILED = 5 NO_IMPROVEMENT = 6 + INPUT_VALIDATION_FAILED = 7 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py index 00619f5b456e..b09bd5a4a0f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document.py @@ -19,6 +19,7 @@ from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -91,6 +92,14 @@ class Document(proto.Message): This field is OUTPUT_ONLY. If this field is not populated, it means the document has never been indexed. 
+ index_status (google.cloud.discoveryengine_v1beta.types.Document.IndexStatus): + Output only. The index status of the document. + + - If document is indexed successfully, the index_time field + is populated. + - Otherwise, if document is not indexed due to errors, the + error_samples field is populated. + - Otherwise, index_status is unset. """ class Content(proto.Message): @@ -154,6 +163,31 @@ class Content(proto.Message): number=1, ) + class IndexStatus(proto.Message): + r"""Index status of the document. + + Attributes: + index_time (google.protobuf.timestamp_pb2.Timestamp): + The time when the document was indexed. + If this field is populated, it means the + document has been indexed. + error_samples (MutableSequence[google.rpc.status_pb2.Status]): + A sample of errors encountered while indexing + the document. If this field is populated, the + document is not indexed due to errors. + """ + + index_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + error_samples: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + struct_data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=4, @@ -196,6 +230,11 @@ class Content(proto.Message): number=13, message=timestamp_pb2.Timestamp, ) + index_status: IndexStatus = proto.Field( + proto.MESSAGE, + number=15, + message=IndexStatus, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py index ee06ee0d8a47..2192893da7b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_processing_config.py @@ 
-62,6 +62,8 @@ class DocumentProcessingConfig(proto.Message): digital parsing and layout parsing are supported. - ``pptx``: Override parsing config for PPTX files, only digital parsing and layout parsing are supported. + - ``xlsx``: Override parsing config for XLSX files, only + digital parsing and layout parsing are supported. """ class ChunkingConfig(proto.Message): diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py index 52f0c0f67f71..39c4a3a68d11 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/document_service.py @@ -18,6 +18,7 @@ from typing import MutableMapping, MutableSequence from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore from google.cloud.discoveryengine_v1beta.types import document as gcd_document @@ -31,6 +32,8 @@ "CreateDocumentRequest", "UpdateDocumentRequest", "DeleteDocumentRequest", + "BatchGetDocumentsMetadataRequest", + "BatchGetDocumentsMetadataResponse", }, ) @@ -271,4 +274,174 @@ class DeleteDocumentRequest(proto.Message): ) +class BatchGetDocumentsMetadataRequest(proto.Message): + r"""Request message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + parent (str): + Required. The parent branch resource name, such as + ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. + matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.Matcher): + Required. Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. 
+ """ + + class UrisMatcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s by exact + uris. + + Attributes: + uris (MutableSequence[str]): + The exact URIs to match by. + """ + + uris: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + class Matcher(proto.Message): + r"""Matcher for the + [Document][google.cloud.discoveryengine.v1beta.Document]s. Currently + supports matching by exact URIs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uris_matcher (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest.UrisMatcher): + Matcher by exact URIs. + + This field is a member of `oneof`_ ``matcher``. + """ + + uris_matcher: "BatchGetDocumentsMetadataRequest.UrisMatcher" = proto.Field( + proto.MESSAGE, + number=1, + oneof="matcher", + message="BatchGetDocumentsMetadataRequest.UrisMatcher", + ) + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + matcher: Matcher = proto.Field( + proto.MESSAGE, + number=2, + message=Matcher, + ) + + +class BatchGetDocumentsMetadataResponse(proto.Message): + r"""Response message for + [DocumentService.BatchGetDocumentsMetadata][google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata] + method. + + Attributes: + documents_metadata (MutableSequence[google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata]): + The metadata of the + [Document][google.cloud.discoveryengine.v1beta.Document]s. + """ + + class State(proto.Enum): + r"""The state of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Values: + STATE_UNSPECIFIED (0): + Should never be set. + INDEXED (1): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is indexed. 
+ NOT_IN_TARGET_SITE (2): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed because its URI is not in the + [TargetSite][google.cloud.discoveryengine.v1beta.TargetSite]. + NOT_IN_INDEX (3): + The [Document][google.cloud.discoveryengine.v1beta.Document] + is not indexed. + """ + STATE_UNSPECIFIED = 0 + INDEXED = 1 + NOT_IN_TARGET_SITE = 2 + NOT_IN_INDEX = 3 + + class DocumentMetadata(proto.Message): + r"""The metadata of a + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Attributes: + matcher_value (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue): + The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + state (google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse.State): + The state of the document. + last_refreshed_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the last time the + [Document][google.cloud.discoveryengine.v1beta.Document] was + last indexed. + data_ingestion_source (str): + The data ingestion source of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + Allowed values are: + + - ``batch``: Data ingested via Batch API, e.g., + ImportDocuments. + - ``streaming`` Data ingested via Streaming API, e.g., FHIR + streaming. + """ + + class MatcherValue(proto.Message): + r"""The value of the matcher that was used to match the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + If match by URI, the URI of the + [Document][google.cloud.discoveryengine.v1beta.Document]. + + This field is a member of `oneof`_ ``matcher_value``. 
+ """ + + uri: str = proto.Field( + proto.STRING, + number=1, + oneof="matcher_value", + ) + + matcher_value: "BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue" = proto.Field( + proto.MESSAGE, + number=2, + message="BatchGetDocumentsMetadataResponse.DocumentMetadata.MatcherValue", + ) + state: "BatchGetDocumentsMetadataResponse.State" = proto.Field( + proto.ENUM, + number=3, + enum="BatchGetDocumentsMetadataResponse.State", + ) + last_refreshed_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + data_ingestion_source: str = proto.Field( + proto.STRING, + number=5, + ) + + documents_metadata: MutableSequence[DocumentMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=DocumentMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py index 3734ca3d5c68..cbdefdb249b3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/grounded_generation_service.py @@ -180,8 +180,9 @@ class Claim(proto.Message): false. In that case, no grounding check was done for the claim and therefore [citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.citation_indices], + [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices], and - [anti_citation_indices][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.anti_citation_indices] + [score][google.cloud.discoveryengine.v1beta.CheckGroundingResponse.Claim.score] should not be returned. This field is a member of `oneof`_ ``_grounding_check_required``. 
diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py index 40e7225633b5..b4cb57eb641b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/import_config.py @@ -129,10 +129,10 @@ class BigQuerySource(proto.Message): This field is a member of `oneof`_ ``partition``. project_id (str): - The project ID (can be project # or ID) that - the BigQuery source is in with a length limit of - 128 characters. If not specified, inherits the - project ID from the parent request. + The project ID or the project number that + contains the BigQuery source. Has a length limit + of 128 characters. If not specified, inherits + the project ID from the parent request. dataset_id (str): Required. The BigQuery data set to copy the data from with a length limit of 1,024 @@ -205,9 +205,9 @@ class SpannerSource(proto.Message): Attributes: project_id (str): - The project ID that the Spanner source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Spanner + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The instance ID of the source @@ -428,9 +428,9 @@ class BigtableSource(proto.Message): Attributes: project_id (str): - The project ID that the Bigtable source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Bigtable + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. 
The instance ID of the Cloud @@ -478,6 +478,11 @@ class FhirStoreSource(proto.Message): characters. Can be specified if one wants to have the FhirStore export to a specific Cloud Storage directory. + resource_types (MutableSequence[str]): + The FHIR resource types to import. The resource types should + be a subset of all `supported FHIR resource + types `__. + Default to all supported FHIR resource types if empty. """ fhir_store: str = proto.Field( @@ -488,6 +493,10 @@ class FhirStoreSource(proto.Message): proto.STRING, number=2, ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class CloudSqlSource(proto.Message): @@ -495,9 +504,9 @@ class CloudSqlSource(proto.Message): Attributes: project_id (str): - The project ID that the Cloud SQL source is - in with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the Cloud SQL + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. instance_id (str): Required. The Cloud SQL instance to copy the @@ -555,9 +564,9 @@ class AlloyDbSource(proto.Message): Attributes: project_id (str): - The project ID that the AlloyDB source is in - with a length limit of 128 characters. If not - specified, inherits the project ID from the + The project ID that contains the AlloyDB + source. Has a length limit of 128 characters. If + not specified, inherits the project ID from the parent request. location_id (str): Required. 
The AlloyDB location to copy the diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py index 5a7d3c02f8af..829032161fdd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/purge_config.py @@ -21,12 +21,15 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore +from google.cloud.discoveryengine_v1beta.types import import_config + __protobuf__ = proto.module( package="google.cloud.discoveryengine.v1beta", manifest={ "PurgeUserEventsRequest", "PurgeUserEventsResponse", "PurgeUserEventsMetadata", + "PurgeErrorConfig", "PurgeDocumentsRequest", "PurgeDocumentsResponse", "PurgeDocumentsMetadata", @@ -151,24 +154,100 @@ class PurgeUserEventsMetadata(proto.Message): ) +class PurgeErrorConfig(proto.Message): + r"""Configuration of destination for Purge related errors. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcs_prefix (str): + Cloud Storage prefix for purge errors. This must be an + empty, existing Cloud Storage directory. Purge errors are + written to sharded files in this directory, one per line, as + a JSON-encoded ``google.rpc.Status`` message. + + This field is a member of `oneof`_ ``destination``. + """ + + gcs_prefix: str = proto.Field( + proto.STRING, + number=1, + oneof="destination", + ) + + class PurgeDocumentsRequest(proto.Message): r"""Request message for [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + gcs_source (google.cloud.discoveryengine_v1beta.types.GcsSource): + Cloud Storage location for the input content. Supported + ``data_schema``: + + - ``document_id``: One valid + [Document.id][google.cloud.discoveryengine.v1beta.Document.id] + per line. + + This field is a member of `oneof`_ ``source``. + inline_source (google.cloud.discoveryengine_v1beta.types.PurgeDocumentsRequest.InlineSource): + Inline source for the input content for + purge. + + This field is a member of `oneof`_ ``source``. parent (str): Required. The parent resource name, such as ``projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}``. filter (str): Required. Filter matching documents to purge. Only currently supported value is ``*`` (all items). + error_config (google.cloud.discoveryengine_v1beta.types.PurgeErrorConfig): + The desired location of errors incurred + during the purge. force (bool): Actually performs the purge. If ``force`` is set to false, return the expected purge count without deleting any documents. """ + class InlineSource(proto.Message): + r"""The inline source for the input config for + [DocumentService.PurgeDocuments][google.cloud.discoveryengine.v1beta.DocumentService.PurgeDocuments] + method. + + Attributes: + documents (MutableSequence[str]): + Required. A list of full resource name of documents to + purge. In the format + ``projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*``. + Recommended max of 100 items. 
+ """ + + documents: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + gcs_source: import_config.GcsSource = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=import_config.GcsSource, + ) + inline_source: InlineSource = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=InlineSource, + ) parent: str = proto.Field( proto.STRING, number=1, @@ -177,6 +256,11 @@ class PurgeDocumentsRequest(proto.Message): proto.STRING, number=2, ) + error_config: "PurgeErrorConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PurgeErrorConfig", + ) force: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 710ff4c38cba..8db64de1bb12 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -139,9 +139,12 @@ class SearchRequest(proto.Message): object. Leave it unset if ordered by relevance. ``order_by`` expression is case-sensitive. - For more information on ordering for retail search, see - `Ordering `__ - + For more information on ordering the website search results, + see `Order web search + results `__. + For more information on ordering the healthcare search + results, see `Order healthcare search + results `__. If this field is unrecognizable, an ``INVALID_ARGUMENT`` is returned. user_info (google.cloud.discoveryengine_v1beta.types.UserInfo): @@ -329,8 +332,39 @@ class SearchRequest(proto.Message): Session specification. Can be used only when ``session`` is set. + relevance_threshold (google.cloud.discoveryengine_v1beta.types.SearchRequest.RelevanceThreshold): + The relevance threshold of the search + results. 
+ Default to Google defined threshold, leveraging + a balance of precision and recall to deliver + both highly accurate results and comprehensive + coverage of relevant information. """ + class RelevanceThreshold(proto.Enum): + r"""The relevance threshold of the search results. The higher + relevance threshold is, the higher relevant results are shown + and the less number of results are returned. + + Values: + RELEVANCE_THRESHOLD_UNSPECIFIED (0): + Default value. In this case, server behavior + defaults to Google defined threshold. + LOWEST (1): + Lowest relevance threshold. + LOW (2): + Low relevance threshold. + MEDIUM (3): + Medium relevance threshold. + HIGH (4): + High relevance threshold. + """ + RELEVANCE_THRESHOLD_UNSPECIFIED = 0 + LOWEST = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + class ImageQuery(proto.Message): r"""Specifies the image query input. @@ -980,6 +1014,14 @@ class SummarySpec(proto.Message): navigational queries. If this field is set to ``true``, we skip generating summaries for non-summary seeking queries and return fallback messages instead. + ignore_low_relevant_content (bool): + Specifies whether to filter out queries that have low + relevance. The default value is ``false``. + + If this field is set to ``false``, all search results are + used regardless of relevance to generate answers. If set to + ``true``, only queries with high relevance search results + will generate answers. model_prompt_spec (google.cloud.discoveryengine_v1beta.types.SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec): If specified, the spec will be used to modify the prompt provided to the LLM. 
@@ -1057,6 +1099,10 @@ class ModelSpec(proto.Message): proto.BOOL, number=4, ) + ignore_low_relevant_content: bool = proto.Field( + proto.BOOL, + number=9, + ) model_prompt_spec: "SearchRequest.ContentSearchSpec.SummarySpec.ModelPromptSpec" = proto.Field( proto.MESSAGE, number=5, @@ -1519,6 +1565,11 @@ class SessionSpec(proto.Message): number=42, message=SessionSpec, ) + relevance_threshold: RelevanceThreshold = proto.Field( + proto.ENUM, + number=44, + enum=RelevanceThreshold, + ) class SearchResponse(proto.Message): @@ -1545,7 +1596,8 @@ class SearchResponse(proto.Message): A unique search token. This should be included in the [UserEvent][google.cloud.discoveryengine.v1beta.UserEvent] logs resulting from this search, which enables accurate - attribution of search model performance. + attribution of search model performance. This also helps to + identify a request during the customer support scenarios. redirect_uri (str): The URI of a customer-defined redirect page. If redirect action is triggered, no search is performed, and only @@ -1585,6 +1637,9 @@ class SearchResponse(proto.Message): Only set if [SearchRequest.session][google.cloud.discoveryengine.v1beta.SearchRequest.session] is provided. See its description for more details. + one_box_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult]): + A list of One Box results. There can be + multiple One Box results of different types. """ class SearchResult(proto.Message): @@ -1773,13 +1828,13 @@ class SummarySkippedReason(proto.Enum): ADVERSARIAL_QUERY_IGNORED (1): The adversarial query ignored case. - Only populated when + Only used when [SummarySpec.ignore_adversarial_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_adversarial_query] is set to ``true``. NON_SUMMARY_SEEKING_QUERY_IGNORED (2): The non-summary seeking query ignored case. 
- Only populated when + Only used when [SummarySpec.ignore_non_summary_seeking_query][google.cloud.discoveryengine.v1beta.SearchRequest.ContentSearchSpec.SummarySpec.ignore_non_summary_seeking_query] is set to ``true``. OUT_OF_DOMAIN_QUERY_IGNORED (3): @@ -1806,6 +1861,19 @@ class SummarySkippedReason(proto.Enum): Google skips the summary if there is no relevant content in the retrieved search results. + JAIL_BREAKING_QUERY_IGNORED (7): + The jail-breaking query ignored case. + + For example, "Reply in the tone of a competing company's + CEO". Only used when + [SearchRequest.ContentSearchSpec.SummarySpec.ignore_jail_breaking_query] + is set to ``true``. + CUSTOMER_POLICY_VIOLATION (8): + The customer policy violation case. + + Google skips the summary if there is a customer + policy violation detected. The policy is defined + by the customer. """ SUMMARY_SKIPPED_REASON_UNSPECIFIED = 0 ADVERSARIAL_QUERY_IGNORED = 1 @@ -1814,6 +1882,8 @@ class SummarySkippedReason(proto.Enum): POTENTIAL_POLICY_VIOLATION = 4 LLM_ADDON_NOT_ENABLED = 5 NO_RELEVANT_CONTENT = 6 + JAIL_BREAKING_QUERY_IGNORED = 7 + CUSTOMER_POLICY_VIOLATION = 8 class SafetyAttributes(proto.Message): r"""Safety Attribute categories and their associated confidence @@ -2098,6 +2168,9 @@ class StringConstraint(proto.Message): Values of the string field. The record will only be returned if the field value matches one of the values specified here. + query_segment (str): + Identifies the keywords within the search + query that match a filter. """ field_name: str = proto.Field( @@ -2108,6 +2181,10 @@ class StringConstraint(proto.Message): proto.STRING, number=2, ) + query_segment: str = proto.Field( + proto.STRING, + number=3, + ) class NumberConstraint(proto.Message): r"""Constraint expression of a number field. Example: price < @@ -2124,6 +2201,9 @@ class NumberConstraint(proto.Message): value (float): The value specified in the numerical constraint. 
+ query_segment (str): + Identifies the keywords within the search + query that match a filter. """ class Comparison(proto.Enum): @@ -2163,6 +2243,10 @@ class Comparison(proto.Enum): proto.DOUBLE, number=3, ) + query_segment: str = proto.Field( + proto.STRING, + number=4, + ) class GeolocationConstraint(proto.Message): r"""Constraint of a geolocation field. @@ -2177,6 +2261,12 @@ class GeolocationConstraint(proto.Message): the input query. The proximity of the reference address to the geolocation field will be used to filter the results. + latitude (float): + The latitude of the geolocation inferred from + the input query. + longitude (float): + The longitude of the geolocation inferred + from the input query. radius_in_meters (float): The radius in meters around the address. The record is returned if the location of the @@ -2191,6 +2281,14 @@ class GeolocationConstraint(proto.Message): proto.STRING, number=2, ) + latitude: float = proto.Field( + proto.DOUBLE, + number=4, + ) + longitude: float = proto.Field( + proto.DOUBLE, + number=5, + ) radius_in_meters: float = proto.Field( proto.FLOAT, number=3, @@ -2344,6 +2442,48 @@ class SessionInfo(proto.Message): number=2, ) + class OneBoxResult(proto.Message): + r"""OneBoxResult is a holder for all results of specific type + that we want to display in UI differently. + + Attributes: + one_box_type (google.cloud.discoveryengine_v1beta.types.SearchResponse.OneBoxResult.OneBoxType): + The type of One Box result. + search_results (MutableSequence[google.cloud.discoveryengine_v1beta.types.SearchResponse.SearchResult]): + The search results for this One Box. + """ + + class OneBoxType(proto.Enum): + r"""The type of One Box result. + + Values: + ONE_BOX_TYPE_UNSPECIFIED (0): + Default value. Should not be used. + PEOPLE (1): + One Box result contains people results. + ORGANIZATION (2): + One Box result contains organization results. + SLACK (3): + One Box result contains slack results. 
+ """ + ONE_BOX_TYPE_UNSPECIFIED = 0 + PEOPLE = 1 + ORGANIZATION = 2 + SLACK = 3 + + one_box_type: "SearchResponse.OneBoxResult.OneBoxType" = proto.Field( + proto.ENUM, + number=1, + enum="SearchResponse.OneBoxResult.OneBoxType", + ) + search_results: MutableSequence[ + "SearchResponse.SearchResult" + ] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="SearchResponse.SearchResult", + ) + @property def raw_page(self): return self @@ -2414,6 +2554,11 @@ def raw_page(self): number=19, message=SessionInfo, ) + one_box_results: MutableSequence[OneBoxResult] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=OneBoxResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py index 23500fae2a57..5c959d423932 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/user_event.py @@ -673,6 +673,9 @@ class DocumentInfo(proto.Message): The promotion IDs associated with this Document. Currently, this field is restricted to at most one ID. + joined (bool): + Output only. Whether the referenced Document + can be found in the data store. 
""" id: str = proto.Field( @@ -699,6 +702,10 @@ class DocumentInfo(proto.Message): proto.STRING, number=4, ) + joined: bool = proto.Field( + proto.BOOL, + number=5, + ) class PanelInfo(proto.Message): diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..ff9157e921d6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..84861f6a63ac --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py index dded80300569..7e27f4affa4f 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def 
sample_purge_documents(): client = discoveryengine_v1.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py index d0d2932cf202..d051616047f7 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py new file mode 100644 index 000000000000..9d7a80ef3114 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = await client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py new file mode 100644 index 000000000000..faedb982f00e --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCustomModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_list_custom_models(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Make the request + response = client.list_custom_models(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py new file mode 100644 index 000000000000..ba97a5e74862 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py new file mode 100644 index 000000000000..422d9bcdcc6d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TrainCustomModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_train_custom_model(): + # Create a client + client = discoveryengine_v1.SearchTuningServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.TrainCustomModelRequest( + data_store="data_store_value", + ) + + # Make the request + operation = client.train_custom_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py new file mode 100644 index 000000000000..13cc76da400c --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +async def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py new file mode 100644 index 000000000000..eeab6a2ea2b3 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for PurgeUserEvents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1 + + +def sample_purge_user_events(): + # Create a client + client = discoveryengine_v1.UserEventServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Make the request + operation = client.purge_user_events(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..8a98d99976c6 --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..972a126db36b --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1alpha + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1alpha.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1alpha.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py new file mode 100644 index 000000000000..37e8933b085d --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +async def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceAsyncClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = await client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py new file mode 100644 index 000000000000..2164801056cc --- /dev/null +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetDocumentsMetadata +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-discoveryengine + + +# [START discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import discoveryengine_v1beta + + +def sample_batch_get_documents_metadata(): + # Create a client + client = discoveryengine_v1beta.DocumentServiceClient() + + # Initialize request argument(s) + request = discoveryengine_v1beta.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_documents_metadata(request=request) + + # Handle the response + print(response) + +# [END discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync] diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py index 204de42ab390..d57fdf0e3be2 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_async.py @@ -39,7 +39,11 @@ async def 
sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceAsyncClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py index d4538c05c707..6819052d64c8 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/discoveryengine_v1beta_generated_document_service_purge_documents_sync.py @@ -39,7 +39,11 @@ def sample_purge_documents(): client = discoveryengine_v1beta.DocumentServiceClient() # Initialize request argument(s) + gcs_source = discoveryengine_v1beta.GcsSource() + gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] + request = discoveryengine_v1beta.PurgeDocumentsRequest( + gcs_source=gcs_source, parent="parent_value", filter="filter_value", ) diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 69944fc7ce8a..7c8ee0acc6ee 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -4541,6 +4541,167 @@ ], "title": 
"discoveryengine_v1_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ -5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_async", "segments": [ { - 
"end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -8090,22 +8251,22 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" 
+ "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8120,22 +8281,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -8145,43 +8306,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - "shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": 
"SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.list_custom_models", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.ListCustomModels", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchCreateTargetSites" + "shortName": "ListCustomModels" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.ListCustomModelsRequest" }, { "name": "retry", @@ -8196,22 +8357,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_create_target_sites" + "resultType": "google.cloud.discoveryengine_v1.types.ListCustomModelsResponse", + "shortName": "list_custom_models" }, - "description": "Sample for BatchCreateTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "description": "Sample for ListCustomModels", + "file": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_ListCustomModels_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -8221,44 +8382,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_list_custom_models_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", - "shortName": "SiteSearchEngineServiceAsyncClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient", + "shortName": "SearchTuningServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceAsyncClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8274,13 +8435,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - "description": "Sample for BatchVerifyTargetSites", - "file": 
"discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_async", "segments": [ { "end": 55, @@ -8313,28 +8474,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", - "shortName": "SiteSearchEngineServiceClient" + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient", + "shortName": "SearchTuningServiceClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "fullName": "google.cloud.discoveryengine_v1.SearchTuningServiceClient.train_custom_model", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService.TrainCustomModel", "service": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", - "shortName": "SiteSearchEngineService" + "fullName": "google.cloud.discoveryengine.v1.SearchTuningService", + "shortName": "SearchTuningService" }, - "shortName": "BatchVerifyTargetSites" + "shortName": "TrainCustomModel" }, "parameters": [ { "name": "request", - "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + "type": "google.cloud.discoveryengine_v1.types.TrainCustomModelRequest" }, { "name": "retry", @@ -8350,13 +8511,13 @@ } 
], "resultType": "google.api_core.operation.Operation", - "shortName": "batch_verify_target_sites" + "shortName": "train_custom_model" }, - "description": "Sample for BatchVerifyTargetSites", - "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "description": "Sample for TrainCustomModel", + "file": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "regionTag": "discoveryengine_v1_generated_SearchTuningService_TrainCustomModel_sync", "segments": [ { "end": 55, @@ -8389,7 +8550,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + "title": "discoveryengine_v1_generated_search_tuning_service_train_custom_model_sync.py" }, { "canonical": true, @@ -8399,14 +8560,320 @@ "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", "shortName": "SiteSearchEngineServiceAsyncClient" }, - "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_create_target_sites", "method": { - "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", "service": { "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", "shortName": "SiteSearchEngineService" }, - "shortName": "CreateTargetSite" + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_create_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchCreateTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchCreateTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchCreateTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"batch_create_target_sites" + }, + "description": "Sample for BatchCreateTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchCreateTargetSites_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_create_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": 
"discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient", + "shortName": "SiteSearchEngineServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceClient.batch_verify_target_sites", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.BatchVerifyTargetSites", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "BatchVerifyTargetSites" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.BatchVerifyTargetSitesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "batch_verify_target_sites" + }, + "description": "Sample for BatchVerifyTargetSites", + "file": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"discoveryengine_v1_generated_SiteSearchEngineService_BatchVerifyTargetSites_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_site_search_engine_service_batch_verify_target_sites_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient", + "shortName": "SiteSearchEngineServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.SiteSearchEngineServiceAsyncClient.create_target_site", + "method": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService.CreateTargetSite", + "service": { + "fullName": "google.cloud.discoveryengine.v1.SiteSearchEngineService", + "shortName": "SiteSearchEngineService" + }, + "shortName": "CreateTargetSite" }, "parameters": [ { @@ -10283,6 +10750,159 @@ ], "title": "discoveryengine_v1_generated_user_event_service_import_user_events_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient", + "shortName": "UserEventServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceAsyncClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + 
}, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "purge_user_events" + }, + "description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient", + "shortName": "UserEventServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1.UserEventServiceClient.purge_user_events", + "method": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService.PurgeUserEvents", + "service": { + "fullName": "google.cloud.discoveryengine.v1.UserEventService", + "shortName": "UserEventService" + }, + "shortName": "PurgeUserEvents" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1.types.PurgeUserEventsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "purge_user_events" + }, + 
"description": "Sample for PurgeUserEvents", + "file": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1_generated_UserEventService_PurgeUserEvents_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1_generated_user_event_service_purge_user_events_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index 2884e903e0cf..4d014b08daf6 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -5507,6 +5507,167 @@ ], "title": "discoveryengine_v1alpha_generated_data_store_service_update_document_processing_config_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": 
"google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1alpha.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1alpha.DocumentService", + "shortName": "DocumentService" + 
}, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1alpha.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1alpha_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1alpha_generated_document_service_batch_get_documents_metadata_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index cff39c47dc30..c8efdd7d5106 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -4541,6 
+4541,167 @@ ], "title": "discoveryengine_v1beta_generated_data_store_service_update_data_store_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient", + "shortName": "DocumentServiceAsyncClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceAsyncClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient", + "shortName": "DocumentServiceClient" + }, + "fullName": "google.cloud.discoveryengine_v1beta.DocumentServiceClient.batch_get_documents_metadata", + "method": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService.BatchGetDocumentsMetadata", + "service": { + "fullName": "google.cloud.discoveryengine.v1beta.DocumentService", + "shortName": "DocumentService" + }, + "shortName": "BatchGetDocumentsMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.discoveryengine_v1beta.types.BatchGetDocumentsMetadataResponse", + "shortName": "batch_get_documents_metadata" + }, + "description": "Sample for BatchGetDocumentsMetadata", + "file": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "discoveryengine_v1beta_generated_DocumentService_BatchGetDocumentsMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "discoveryengine_v1beta_generated_document_service_batch_get_documents_metadata_sync.py" + }, { 
"canonical": true, "clientMethod": { @@ -5393,12 +5554,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5408,18 +5569,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -5469,12 +5630,12 @@ "regionTag": "discoveryengine_v1beta_generated_DocumentService_PurgeDocuments_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -5484,18 +5645,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py index c9797094604f..b89b5e57f538 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 
'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -48,7 +49,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'converse_conversation': ('name', 'query', 'serving_config', 'conversation', 'safe_search', 'user_labels', 'summary_spec', 'filter', 'boost_spec', ), 'create_control': ('parent', 'control', 'control_id', ), 'create_conversation': ('parent', 'conversation', ), - 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', ), + 'create_data_store': ('parent', 'data_store', 'data_store_id', 'create_advanced_site_search', 'skip_default_schema_creation', ), 'create_document': ('parent', 'document', 'document_id', ), 'create_engine': ('parent', 'engine', 'engine_id', ), 'create_schema': ('parent', 'schema', 'schema_id', ), @@ -81,6 +82,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'import_user_events': ('parent', 'inline_source', 'gcs_source', 'bigquery_source', 'error_config', ), 'list_controls': ('parent', 'page_size', 'page_token', 'filter', ), 'list_conversations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_custom_models': ('data_store', ), 'list_data_stores': ('parent', 'page_size', 'page_token', 'filter', ), 'list_documents': ('parent', 'page_size', 'page_token', ), 'list_engines': ('parent', 'page_size', 'page_token', 'filter', ), @@ -89,12 +91,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'list_target_sites': ('parent', 'page_size', 'page_token', ), 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': 
('parent', ), + 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'safe_search', 'user_labels', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'update_control': ('control', 'update_mask', ), 'update_conversation': ('conversation', 'update_mask', ), 'update_data_store': ('data_store', 'update_mask', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py index e10372b3b1fa..d82dccd4ad51 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1alpha_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 
'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), diff --git a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py index 8a4765d046fe..f2f74d58b3d5 100644 --- a/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py +++ b/packages/google-cloud-discoveryengine/scripts/fixup_discoveryengine_v1beta_keywords.py @@ -41,6 +41,7 @@ class discoveryengineCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'answer_query': ('serving_config', 'query', 'session', 'safety_spec', 'related_questions_spec', 'answer_generation_spec', 'search_spec', 'query_understanding_spec', 'asynchronous_mode', 'user_pseudo_id', 'user_labels', ), 'batch_create_target_sites': ('parent', 'requests', ), + 'batch_get_documents_metadata': ('parent', 'matcher', ), 'batch_verify_target_sites': ('parent', ), 'check_grounding': ('grounding_config', 'answer_candidate', 'facts', 'grounding_spec', 'user_labels', ), 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ), @@ -106,14 +107,14 @@ class discoveryengineCallTransformer(cst.CSTTransformer): 'pause_engine': ('name', ), 'provision_project': ('name', 'accept_data_use_terms', 'data_use_terms_version', ), 'purge_completion_suggestions': ('parent', ), - 'purge_documents': ('parent', 'filter', 'force', ), + 'purge_documents': ('parent', 'filter', 'gcs_source', 'inline_source', 'error_config', 'force', ), 'purge_suggestion_deny_list_entries': ('parent', ), 'purge_user_events': ('parent', 'filter', 'force', ), 'rank': ('ranking_config', 'records', 'model', 'top_n', 'query', 'ignore_record_details_in_response', 'user_labels', ), 'recommend': ('serving_config', 'user_event', 'page_size', 'filter', 'validate_only', 'params', 'user_labels', ), 'recrawl_uris': ('site_search_engine', 'uris', ), 
'resume_engine': ('name', ), - 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', ), + 'search': ('serving_config', 'branch', 'query', 'image_query', 'page_size', 'page_token', 'offset', 'data_store_specs', 'filter', 'canonical_filter', 'order_by', 'user_info', 'language_code', 'region_code', 'facet_specs', 'boost_spec', 'params', 'query_expansion_spec', 'spell_correction_spec', 'user_pseudo_id', 'content_search_spec', 'embedding_spec', 'ranking_expression', 'safe_search', 'user_labels', 'natural_language_query_understanding_spec', 'search_as_you_type_spec', 'session', 'session_spec', 'relevance_threshold', ), 'train_custom_model': ('data_store', 'gcs_training_input', 'model_type', 'error_config', 'model_id', ), 'tune_engine': ('name', ), 'update_control': ('control', 'update_mask', ), diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index 9459a96eceda..c3bd0fd5703a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -3556,6 +3556,7 @@ def test_create_data_store_rest_required_fields( ( "create_advanced_site_search", "data_store_id", + "skip_default_schema_creation", ) ) jsonified_request.update(unset_fields) @@ -3622,6 +3623,7 @@ def test_create_data_store_rest_unset_required_fields(): 
( "createAdvancedSiteSearch", "dataStoreId", + "skipDefaultSchemaCreation", ) ) & set( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index 207850afb022..2ad97529f22b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which 
are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + 
"code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. 
assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 @@ def 
test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert 
jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", 
request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = 
purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py new file mode 100644 index 000000000000..7f47ed810349 --- /dev/null +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py @@ -0,0 +1,3772 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.discoveryengine_v1.services.search_tuning_service import ( + SearchTuningServiceAsyncClient, + SearchTuningServiceClient, + transports, +) +from google.cloud.discoveryengine_v1.types import ( + custom_tuning_model, + import_config, + search_tuning_service, +) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SearchTuningServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SearchTuningServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + 
"auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SearchTuningServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SearchTuningServiceClient._get_client_cert_source(None, False) is None + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with 
mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SearchTuningServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SearchTuningServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SearchTuningServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SearchTuningServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SearchTuningServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == 
mock_endpoint + ) + assert ( + SearchTuningServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SearchTuningServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SearchTuningServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SearchTuningServiceClient._get_universe_domain(None, None) + == SearchTuningServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SearchTuningServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SearchTuningServiceGrpcTransport, "grpc"), + (transports.SearchTuningServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SearchTuningServiceRestTransport, "rest"), + ], 
+) +def test_search_tuning_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SearchTuningServiceClient, "grpc"), + (SearchTuningServiceAsyncClient, "grpc_asyncio"), + (SearchTuningServiceClient, "rest"), + ], +) +def test_search_tuning_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +def test_search_tuning_service_client_get_transport_class(): + transport = SearchTuningServiceClient.get_transport_class() + available_transports = [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceRestTransport, + ] + 
assert transport in available_transports + + transport = SearchTuningServiceClient.get_transport_class("grpc") + assert transport == transports.SearchTuningServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SearchTuningServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "true", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + "false", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "true", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_search_tuning_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SearchTuningServiceClient, SearchTuningServiceAsyncClient] +) +@mock.patch.object( + SearchTuningServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceClient), +) +@mock.patch.object( + SearchTuningServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SearchTuningServiceAsyncClient), +) +def test_search_tuning_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SearchTuningServiceClient._DEFAULT_UNIVERSE + default_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SearchTuningServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + ), + ], +) +def test_search_tuning_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + SearchTuningServiceClient, + transports.SearchTuningServiceRestTransport, + "rest", + None, + ), + ], +) +def test_search_tuning_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_search_tuning_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SearchTuningServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SearchTuningServiceClient, + transports.SearchTuningServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_search_tuning_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is 
concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_train_custom_model_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +def test_train_custom_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.train_custom_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest( + data_store="data_store_value", + model_type="model_type_value", + model_id="model_id_value", + ) + + +def test_train_custom_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.TrainCustomModelRequest() + + +@pytest.mark.asyncio +async def test_train_custom_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.train_custom_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.train_custom_model + ] = mock_rpc + + request = {} + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_train_custom_model_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.TrainCustomModelRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.TrainCustomModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_train_custom_model_async_from_dict(): + await test_train_custom_model_async(request_type=dict) + + +def test_train_custom_model_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_train_custom_model_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.TrainCustomModelRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.train_custom_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models(request_type, transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = search_tuning_service.ListCustomModelsResponse() + response = client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +def test_list_custom_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_custom_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest( + data_store="data_store_value", + ) + + +def test_list_custom_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == search_tuning_service.ListCustomModelsRequest() + + +@pytest.mark.asyncio +async def test_list_custom_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_custom_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_custom_models + ] = mock_rpc + + request = {} + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_custom_models_async( + transport: str = "grpc_asyncio", + request_type=search_tuning_service.ListCustomModelsRequest, +): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + response = await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = search_tuning_service.ListCustomModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +@pytest.mark.asyncio +async def test_list_custom_models_async_from_dict(): + await test_list_custom_models_async(request_type=dict) + + +def test_list_custom_models_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = search_tuning_service.ListCustomModelsResponse() + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_custom_models_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = search_tuning_service.ListCustomModelsRequest() + + request.data_store = "data_store_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_custom_models), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + search_tuning_service.ListCustomModelsResponse() + ) + await client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_store=data_store_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.TrainCustomModelRequest, + dict, + ], +) +def test_train_custom_model_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.train_custom_model(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_train_custom_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.train_custom_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.train_custom_model + ] = mock_rpc + + request = {} + client.train_custom_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.train_custom_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_train_custom_model_rest_required_fields( + request_type=search_tuning_service.TrainCustomModelRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).train_custom_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.train_custom_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_train_custom_model_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.train_custom_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_train_custom_model_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, 
"_set_result_from_operation" + ), mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.TrainCustomModelRequest.pb( + search_tuning_service.TrainCustomModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = search_tuning_service.TrainCustomModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.train_custom_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_train_custom_model_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.TrainCustomModelRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.train_custom_model(request) + + +def test_train_custom_model_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + search_tuning_service.ListCustomModelsRequest, + dict, + ], +) +def test_list_custom_models_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_custom_models(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, search_tuning_service.ListCustomModelsResponse) + + +def test_list_custom_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_custom_models in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_custom_models + ] = mock_rpc + + request = {} + client.list_custom_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_custom_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_custom_models_rest_required_fields( + request_type=search_tuning_service.ListCustomModelsRequest, +): + transport_class = transports.SearchTuningServiceRestTransport + + request_init = {} + request_init["data_store"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["dataStore"] = "data_store_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_custom_models._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "dataStore" in jsonified_request + assert jsonified_request["dataStore"] == "data_store_value" + + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = search_tuning_service.ListCustomModelsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = search_tuning_service.ListCustomModelsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_custom_models(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_custom_models_rest_unset_required_fields(): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_custom_models._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("dataStore",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_custom_models_rest_interceptors(null_interceptor): + transport = transports.SearchTuningServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SearchTuningServiceRestInterceptor(), + ) + client = SearchTuningServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" + ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" + 
) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = search_tuning_service.ListCustomModelsRequest.pb( + search_tuning_service.ListCustomModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + search_tuning_service.ListCustomModelsResponse.to_json( + search_tuning_service.ListCustomModelsResponse() + ) + ) + + request = search_tuning_service.ListCustomModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = search_tuning_service.ListCustomModelsResponse() + + client.list_custom_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_custom_models_rest_bad_request( + transport: str = "rest", request_type=search_tuning_service.ListCustomModelsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "data_store": "projects/sample1/locations/sample2/collections/sample3/dataStores/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_custom_models(request) + + +def test_list_custom_models_rest_error(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SearchTuningServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SearchTuningServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SearchTuningServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SearchTuningServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SearchTuningServiceGrpcTransport, + ) + + +def test_search_tuning_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_search_tuning_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SearchTuningServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "train_custom_model", + "list_custom_models", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_search_tuning_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_search_tuning_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.discoveryengine_v1.services.search_tuning_service.transports.SearchTuningServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SearchTuningServiceTransport() + adc.assert_called_once() + + +def test_search_tuning_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SearchTuningServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + transports.SearchTuningServiceRestTransport, + ], +) +def test_search_tuning_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SearchTuningServiceGrpcTransport, grpc_helpers), + (transports.SearchTuningServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_search_tuning_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "discoveryengine.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="discoveryengine.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_search_tuning_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SearchTuningServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_search_tuning_service_rest_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_no_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_search_tuning_service_host_with_port(transport_name): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="discoveryengine.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "discoveryengine.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://discoveryengine.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_search_tuning_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SearchTuningServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SearchTuningServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.train_custom_model._session + session2 = client2.transport.train_custom_model._session + assert session1 != session2 + session1 = client1.transport.list_custom_models._session + session2 = client2.transport.list_custom_models._session + assert session1 != session2 + + +def 
test_search_tuning_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_search_tuning_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SearchTuningServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SearchTuningServiceGrpcTransport, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ], +) +def test_search_tuning_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_search_tuning_service_grpc_lro_client(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_search_tuning_service_grpc_lro_async_client(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_custom_tuning_model_path(): + project = "squid" + location = "clam" + data_store = "whelk" + custom_tuning_model = "octopus" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}/customTuningModels/{custom_tuning_model}".format( + project=project, + location=location, + data_store=data_store, + custom_tuning_model=custom_tuning_model, + ) + actual = SearchTuningServiceClient.custom_tuning_model_path( + project, location, data_store, custom_tuning_model + ) + assert expected == actual + + +def test_parse_custom_tuning_model_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_store": "cuttlefish", + "custom_tuning_model": "mussel", + } + path = SearchTuningServiceClient.custom_tuning_model_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_custom_tuning_model_path(path) + assert expected == actual + + +def test_data_store_path(): + project = "winkle" + location = "nautilus" + data_store = "scallop" + expected = "projects/{project}/locations/{location}/dataStores/{data_store}".format( + project=project, + location=location, + data_store=data_store, + ) + actual = SearchTuningServiceClient.data_store_path(project, location, data_store) + assert expected == actual + + +def test_parse_data_store_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_store": "clam", + } + path = SearchTuningServiceClient.data_store_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_data_store_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SearchTuningServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = SearchTuningServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SearchTuningServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = SearchTuningServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SearchTuningServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = SearchTuningServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = SearchTuningServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = SearchTuningServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SearchTuningServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SearchTuningServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = SearchTuningServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SearchTuningServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SearchTuningServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SearchTuningServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.CancelOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "{}" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.GetOperationRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + transport: str = "rest", request_type=operations_pb2.ListOperationsRequest +): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_cancel_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = SearchTuningServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SearchTuningServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SearchTuningServiceClient, transports.SearchTuningServiceGrpcTransport), + ( + SearchTuningServiceAsyncClient, + transports.SearchTuningServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 08b5d51e76e2..c56cf06b35ec 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -67,6 +67,7 @@ from google.cloud.discoveryengine_v1.types import ( common, import_config, + purge_config, user_event, user_event_service, ) @@ -1809,6 +1810,309 @@ async def test_collect_user_event_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + 
purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events(request_type, transport: str = "grpc"): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_purge_user_events_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +def test_purge_user_events_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.purge_user_events(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest( + parent="parent_value", + filter="filter_value", + ) + + +def test_purge_user_events_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == purge_config.PurgeUserEventsRequest() + + +@pytest.mark.asyncio +async def test_purge_user_events_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.purge_user_events + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.purge_user_events + ] = mock_rpc + + request = {} + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_purge_user_events_async( + transport: str = "grpc_asyncio", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = purge_config.PurgeUserEventsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_purge_user_events_async_from_dict(): + await test_purge_user_events_async(request_type=dict) + + +def test_purge_user_events_field_headers(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_purge_user_events_field_headers_async(): + client = UserEventServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = purge_config.PurgeUserEventsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.purge_user_events), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + @pytest.mark.parametrize( "request_type", [ @@ -2151,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { @@ -2792,6 +3097,265 @@ def test_collect_user_event_rest_error(): ) +@pytest.mark.parametrize( + "request_type", + [ + purge_config.PurgeUserEventsRequest, + dict, + ], +) +def test_purge_user_events_rest(request_type): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.purge_user_events(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_purge_user_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.purge_user_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.purge_user_events + ] = mock_rpc + + request = {} + client.purge_user_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.purge_user_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_purge_user_events_rest_required_fields( + request_type=purge_config.PurgeUserEventsRequest, +): + transport_class = transports.UserEventServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["filter"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).purge_user_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" + + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.purge_user_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_purge_user_events_rest_unset_required_fields(): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.purge_user_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_purge_user_events_rest_interceptors(null_interceptor): + transport = transports.UserEventServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.UserEventServiceRestInterceptor(), + ) + client = UserEventServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.UserEventServiceRestInterceptor, "post_purge_user_events" + ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, "pre_purge_user_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = purge_config.PurgeUserEventsRequest.pb( + purge_config.PurgeUserEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = purge_config.PurgeUserEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.purge_user_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_purge_user_events_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeUserEventsRequest +): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/dataStores/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.purge_user_events(request) + + +def test_purge_user_events_rest_error(): + client = UserEventServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3182,6 +3746,7 @@ def test_user_event_service_base_transport(): methods = ( "write_user_event", "collect_user_event", + "purge_user_events", "import_user_events", "get_operation", "cancel_operation", @@ -3472,6 +4037,9 @@ def test_user_event_service_client_transport_session_collision(transport_name): session1 = client1.transport.collect_user_event._session session2 = client2.transport.collect_user_event._session assert session1 != session2 + session1 = client1.transport.purge_user_events._session + session2 = client2.transport.purge_user_events._session + assert session1 != session2 session1 = client1.transport.import_user_events._session session2 = client2.transport.import_user_events._session assert session1 != session2 diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 961c537481ba..b54085a025f3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -4181,6 +4181,10 @@ def test_create_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": 
"dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { @@ -5653,6 +5657,10 @@ def test_update_data_store_rest(request_type): "external_idp_config": {"workforce_pool_name": "workforce_pool_name_value"}, }, "acl_enabled": True, + "workspace_config": { + "type_": 1, + "dasher_customer_id": "dasher_customer_id_value", + }, "document_processing_config": { "name": "name_value", "chunking_config": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 8adf368a1a3b..b7b3e0915308 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -4302,6 +4304,387 @@ async def test_get_processed_document_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + 
client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4309,7 +4692,321 @@ async def test_get_processed_document_flattened_error_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4317,41 +5014,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4365,35 +5055,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4404,21 +5094,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4427,7 +5124,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4448,30 +5145,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4484,14 +5189,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4503,17 +5208,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4525,8 +5232,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4535,7 +5242,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4548,10 +5255,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4560,16 +5267,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4577,25 +5284,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4604,26 +5311,85 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", ) -def test_get_document_rest_error(): +def test_list_documents_rest_pager(transport: str = "rest"): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize( "request_type", [ - document_service.ListDocumentsRequest, + document_service.CreateDocumentRequest, dict, ], ) -def test_list_documents_rest(request_type): +def test_create_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4633,32 +5399,146 @@ def test_list_documents_rest(request_type): request_init = { "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "acl_info": { + "readers": [ + { + "principals": [ + {"user_id": "user_id_value", "group_id": "group_id_value"} + ], + "idp_wide": True, + } + ] + }, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4672,35 +5552,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4708,31 +5589,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4741,7 +5623,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4753,47 +5635,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4806,14 +5696,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4825,19 +5715,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4849,8 +5739,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4872,10 +5762,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4884,7 +5774,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4894,6 +5784,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4901,12 +5801,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4919,7 +5819,7 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): +def test_create_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4928,85 +5828,36 @@ def test_list_documents_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), + client.create_document( + document_service.CreateDocumentRequest(), parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) -def test_list_documents_rest_pager(transport: str = "rest"): +def test_create_document_rest_error(): client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } - - pager = client.list_documents(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5014,12 +5865,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -5040,13 +5893,28 @@ def test_create_document_rest(request_type): ] }, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -5130,7 +5998,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. assert isinstance(response, gcd_document.Document) @@ -5140,7 +6008,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5154,36 +6022,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5191,32 +6057,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5237,7 +6098,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -5253,39 +6114,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5298,14 +6152,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5321,7 +6175,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5329,7 +6183,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -5341,8 +6195,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5351,7 +6205,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -5364,10 +6220,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5380,12 +6236,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5395,7 +6252,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5408,20 +6265,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5430,9 +6287,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5442,11 +6298,11 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5455,11 +6311,11 @@ def test_create_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.UpdateDocumentRequest, + document_service.DeleteDocumentRequest, dict, ], ) -def test_update_document_rest(request_type): +def test_delete_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5467,135 +6323,29 @@ def test_update_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "acl_info": { - "readers": [ - { - "principals": [ - {"user_id": "user_id_value", "group_id": "group_id_value"} - ], - "idp_wide": True, - } - ] - }, - "index_time": {"seconds": 751, "nanos": 543}, + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5609,34 +6359,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5647,24 +6398,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5673,7 +6421,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5685,48 +6433,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5739,14 +6475,11 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5758,19 +6491,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5779,11 +6508,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5792,9 +6520,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5807,10 +6533,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5819,53 +6545,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } - # get truthy value for each flattened field - mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + 
json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5874,22 +6587,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5898,11 +6602,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5910,29 +6614,29 @@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5946,35 +6650,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5985,21 +6695,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6008,7 +6718,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate 
an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6020,36 +6730,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6062,11 +6773,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, 
mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6078,15 +6794,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -6095,10 +6815,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6107,7 +6828,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6120,67 +6841,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6189,11 +6853,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6217,13 +6881,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6237,19 +6901,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -6258,20 +6920,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6282,21 +6945,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6330,24 +6996,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6362,14 +7036,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -6385,7 +7059,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -6393,7 +7067,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -6405,8 +7079,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6428,10 +7102,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6440,11 +7114,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.GetProcessedDocumentRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_get_processed_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6452,29 +7126,35 @@ def test_purge_documents_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument( + document="document_value", + json_data="json_data_value", + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document.ProcessedDocument) + assert response.document == "document_value" -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6488,40 +7168,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.get_processed_document + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_processed_document + ] = mock_rpc request = {} - client.purge_documents(request) + client.get_processed_document(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.get_processed_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_get_processed_document_rest_required_fields( + request_type=document_service.GetProcessedDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["filter"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6532,24 +7212,28 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).get_processed_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters 
are not mixing in. + assert not set(unset_fields) - set( + ( + "processed_document_format", + "processed_document_type", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6558,7 +7242,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document.ProcessedDocument() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6570,45 +7254,52 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.get_processed_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def 
test_get_processed_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.get_processed_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "processedDocumentFormat", + "processedDocumentType", + ) + ) & set( ( - "parent", - "filter", + "name", + "processedDocumentType", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_get_processed_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6621,16 +7312,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_get_processed_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_get_processed_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.GetProcessedDocumentRequest.pb( + document_service.GetProcessedDocumentRequest() ) transcode.return_value = { "method": "post", @@ -6642,19 +7331,19 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + 
req.return_value._content = document.ProcessedDocument.to_json( + document.ProcessedDocument() ) - request = purge_config.PurgeDocumentsRequest() + request = document_service.GetProcessedDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document.ProcessedDocument() - client.purge_documents( + client.get_processed_document( request, metadata=[ ("key", "val"), @@ -6666,8 +7355,8 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_get_processed_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6676,7 +7365,7 @@ def test_purge_documents_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -6689,10 +7378,69 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.get_processed_document(request) -def test_purge_documents_rest_error(): +def test_get_processed_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.ProcessedDocument() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.ProcessedDocument.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_processed_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + % client.transport._host, + args[1], + ) + + +def test_get_processed_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_processed_document( + document_service.GetProcessedDocumentRequest(), + name="name_value", + ) + + +def test_get_processed_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6701,11 +7449,11 @@ def test_purge_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.GetProcessedDocumentRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_get_processed_document_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6713,35 +7461,33 @@ def test_get_processed_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.ProcessedDocument( - document="document_value", - json_data="json_data_value", - ) + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert isinstance(response, document.ProcessedDocument) - assert response.document == "document_value" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_get_processed_document_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6756,7 +7502,7 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_processed_document + client._transport.batch_get_documents_metadata in client._transport._wrapped_methods ) @@ -6766,29 +7512,29 @@ def test_get_processed_document_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_processed_document + client._transport.batch_get_documents_metadata ] = mock_rpc request = {} - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_processed_document(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_processed_document_rest_required_fields( - request_type=document_service.GetProcessedDocumentRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6799,28 +7545,23 @@ def test_get_processed_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_processed_document._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "processed_document_format", - "processed_document_type", - ) - ) + assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6829,7 +7570,7 @@ def test_get_processed_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6850,43 +7591,40 @@ def test_get_processed_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_processed_document(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_processed_document_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_processed_document._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set( - ( - "processedDocumentFormat", - "processedDocumentType", - ) - ) + set(("matcher",)) & set( ( - "name", - "processedDocumentType", + "parent", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_processed_document_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6899,14 +7637,14 @@ def test_get_processed_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_processed_document" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_processed_document" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetProcessedDocumentRequest.pb( - document_service.GetProcessedDocumentRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6918,19 +7656,21 @@ def test_get_processed_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.ProcessedDocument.to_json( - document.ProcessedDocument() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) 
) - request = document_service.GetProcessedDocumentRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.ProcessedDocument() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.get_processed_document( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6942,8 +7682,9 @@ def test_get_processed_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_get_processed_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetProcessedDocumentRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6952,7 +7693,7 @@ def test_get_processed_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -6965,10 +7706,10 @@ def test_get_processed_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_processed_document(request) + client.batch_get_documents_metadata(request) -def test_get_processed_document_rest_flattened(): +def test_batch_get_documents_metadata_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6977,16 +7718,16 @@ def test_get_processed_document_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.ProcessedDocument() + return_value = document_service.BatchGetDocumentsMetadataResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -6994,25 +7735,27 @@ def test_get_processed_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.ProcessedDocument.pb(return_value) + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_processed_document(**mock_args) + client.batch_get_documents_metadata(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument" + "%s/v1alpha/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" % client.transport._host, args[1], ) -def test_get_processed_document_rest_flattened_error(transport: str = "rest"): +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7021,13 +7764,13 @@ def test_get_processed_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_processed_document( - document_service.GetProcessedDocumentRequest(), - name="name_value", + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", ) -def test_get_processed_document_rest_error(): +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -7180,6 +7923,7 @@ def test_document_service_base_transport(): "import_documents", "purge_documents", "get_processed_document", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -7487,6 +8231,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.get_processed_document._session session2 = client2.transport.get_processed_document._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git 
a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index b23c90ee838d..45144303f6e4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 6ffa744aae25..9345701436e5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -45,10 +45,12 @@ from google.cloud.location import locations_pb2 from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import date_pb2 # type: ignore import grpc from grpc.experimental import aio @@ -3911,6 +3913,387 @@ async def test_purge_documents_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + document_service.BatchGetDocumentsMetadataRequest, + dict, + ], +) +def 
test_batch_get_documents_metadata(request_type, transport: str = "grpc"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + response = client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +def test_batch_get_documents_metadata_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +def test_batch_get_documents_metadata_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_get_documents_metadata(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest( + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc + request = {} + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == document_service.BatchGetDocumentsMetadataRequest() + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_get_documents_metadata + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_get_documents_metadata + ] = mock_rpc + + request = {} + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.batch_get_documents_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async( + transport: str = "grpc_asyncio", + request_type=document_service.BatchGetDocumentsMetadataRequest, +): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + response = await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = document_service.BatchGetDocumentsMetadataRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_async_from_dict(): + await test_batch_get_documents_metadata_async(request_type=dict) + + +def test_batch_get_documents_metadata_field_headers(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_field_headers_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = document_service.BatchGetDocumentsMetadataRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + await client.batch_get_documents_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_batch_get_documents_metadata_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = document_service.BatchGetDocumentsMetadataResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_batch_get_documents_metadata_flattened_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_documents_metadata), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = document_service.BatchGetDocumentsMetadataResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + document_service.BatchGetDocumentsMetadataResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_get_documents_metadata( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_get_documents_metadata_flattened_error_async(): + client = DocumentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3918,7 +4301,321 @@ async def test_purge_documents_field_headers_async(): dict, ], ) -def test_get_document_rest(request_type): +def test_get_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=document_service.GetDocumentRequest, +): + transport_class = transports.DocumentServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = document.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.DocumentServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DocumentServiceRestInterceptor(), + ) + client = DocumentServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
document_service.GetDocumentRequest.pb( + document_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = document.Document.to_json(document.Document()) + + request = document_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = document.Document() + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_document_rest_bad_request( + transport: str = "rest", request_type=document_service.GetDocumentRequest +): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_document(request) + + +def test_get_document_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document.Document() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + % client.transport._host, + args[1], + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + document_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_get_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3926,41 +4623,34 @@ def test_get_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", + return_value = document_service.ListDocumentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_document_rest_use_cached_wrapped_rpc(): +def test_list_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3974,35 +4664,35 @@ def test_get_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_document in client._transport._wrapped_methods + assert client._transport.list_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc request = {} - client.get_document(request) + client.list_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_document(request) + client.list_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_document_rest_required_fields( - request_type=document_service.GetDocumentRequest, +def test_list_documents_rest_required_fields( + request_type=document_service.ListDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4013,21 +4703,28 @@ def test_get_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_document._get_unset_required_fields(jsonified_request) + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4036,7 +4733,7 @@ def test_get_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4057,30 +4754,38 @@ def test_get_document_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.get_document(request) + response = client.list_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_document_rest_unset_required_fields(): +def test_list_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + 
"pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_document_rest_interceptors(null_interceptor): +def test_list_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4093,14 +4798,14 @@ def test_get_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_get_document" + transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_get_document" + transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.GetDocumentRequest.pb( - document_service.GetDocumentRequest() + pb_message = document_service.ListDocumentsRequest.pb( + document_service.ListDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -4112,17 +4817,19 @@ def test_get_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document.Document.to_json(document.Document()) + req.return_value._content = document_service.ListDocumentsResponse.to_json( + document_service.ListDocumentsResponse() + ) - request = document_service.GetDocumentRequest() + request = document_service.ListDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document.Document() + post.return_value = document_service.ListDocumentsResponse() - client.get_document( + client.list_documents( request, metadata=[ ("key", "val"), @@ -4134,8 +4841,8 @@ def test_get_document_rest_interceptors(null_interceptor): 
post.assert_called_once() -def test_get_document_rest_bad_request( - transport: str = "rest", request_type=document_service.GetDocumentRequest +def test_list_documents_rest_bad_request( + transport: str = "rest", request_type=document_service.ListDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4144,7 +4851,7 @@ def test_get_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -4157,10 +4864,10 @@ def test_get_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_document(request) + client.list_documents(request) -def test_get_document_rest_flattened(): +def test_list_documents_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4169,16 +4876,16 @@ def test_get_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document.Document() + return_value = document_service.ListDocumentsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -4186,25 +4893,25 @@ def test_get_document_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document.Document.pb(return_value) + return_value = document_service.ListDocumentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.get_document(**mock_args) + client.list_documents(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" % client.transport._host, args[1], ) -def test_get_document_rest_flattened_error(transport: str = "rest"): +def test_list_documents_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4213,61 +4920,224 @@ def test_get_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_document( - document_service.GetDocumentRequest(), - name="name_value", + client.list_documents( + document_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + document.Document(), + ], + next_page_token="abc", + ), + document_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + ], + next_page_token="ghi", + ), + document_service.ListDocumentsResponse( + documents=[ + document.Document(), + document.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + document_service.ListDocumentsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, document.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + document_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + request_init["document"] = { + "struct_data": {"fields": {}}, + "json_data": "json_data_value", + "name": "name_value", + "id": "id_value", + "schema_id": "schema_id_value", + "content": { + "raw_bytes": b"raw_bytes_blob", + "uri": "uri_value", + "mime_type": "mime_type_value", + }, + "parent_document_id": "parent_document_id_value", + "derived_struct_data": {}, + "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = document_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_get_document_rest_error(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - document_service.ListDocumentsRequest, - dict, - ], -) -def test_list_documents_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } + # Remove fields from the sample request which 
are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse( - next_page_token="next_page_token_value", + return_value = gcd_document.Document( + name="name_value", + id="id_value", + schema_id="schema_id_value", + parent_document_id="parent_document_id_value", + json_data="json_data_value", ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDocumentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, gcd_document.Document) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.schema_id == "schema_id_value" + assert response.parent_document_id == "parent_document_id_value" -def test_list_documents_rest_use_cached_wrapped_rpc(): +def test_create_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4281,35 +5151,36 @@ def test_list_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_documents in client._transport._wrapped_methods + assert client._transport.create_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc request = {} - client.list_documents(request) + client.create_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_documents(request) + client.create_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_documents_rest_required_fields( - request_type=document_service.ListDocumentsRequest, +def test_create_document_rest_required_fields( + request_type=document_service.CreateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4317,31 +5188,32 @@ def test_list_documents_rest_required_fields( ) # verify fields with default values are dropped + assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == request_init["document_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_documents._get_unset_required_fields(jsonified_request) + ).create_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("document_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "documentId" in jsonified_request + assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4350,7 +5222,7 @@ def test_list_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4362,47 +5234,55 @@ def test_list_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.list_documents(request) + response = client.create_document(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "documentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_documents_rest_unset_required_fields(): +def test_create_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_documents._get_unset_required_fields({}) + unset_fields = transport.create_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("documentId",)) + & set( ( - "pageSize", - "pageToken", + "parent", + "document", + "documentId", ) ) - & set(("parent",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_documents_rest_interceptors(null_interceptor): +def test_create_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4415,14 +5295,14 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_list_documents" + transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_list_documents" + transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.ListDocumentsRequest.pb( - document_service.ListDocumentsRequest() + pb_message = document_service.CreateDocumentRequest.pb( + document_service.CreateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4434,19 +5314,19 @@ def test_list_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = document_service.ListDocumentsResponse.to_json( - document_service.ListDocumentsResponse() + req.return_value._content = gcd_document.Document.to_json( + 
gcd_document.Document() ) - request = document_service.ListDocumentsRequest() + request = document_service.CreateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = document_service.ListDocumentsResponse() + post.return_value = gcd_document.Document() - client.list_documents( + client.create_document( request, metadata=[ ("key", "val"), @@ -4458,8 +5338,8 @@ def test_list_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_list_documents_rest_bad_request( - transport: str = "rest", request_type=document_service.ListDocumentsRequest +def test_create_document_rest_bad_request( + transport: str = "rest", request_type=document_service.CreateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4481,10 +5361,10 @@ def test_list_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_documents(request) + client.create_document(request) -def test_list_documents_rest_flattened(): +def test_create_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4493,7 +5373,7 @@ def test_list_documents_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = document_service.ListDocumentsResponse() + return_value = gcd_document.Document() # get arguments that satisfy an http rule for this method sample_request = { @@ -4503,6 +5383,16 @@ def test_list_documents_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", ) mock_args.update(sample_request) @@ -4510,12 +5400,12 @@ def test_list_documents_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = document_service.ListDocumentsResponse.pb(return_value) + return_value = gcd_document.Document.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.list_documents(**mock_args) + client.create_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -4528,94 +5418,45 @@ def test_list_documents_rest_flattened(): ) -def test_list_documents_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_documents( - document_service.ListDocumentsRequest(), - parent="parent_value", - ) - - -def test_list_documents_rest_pager(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - document.Document(), - ], - next_page_token="abc", - ), - document_service.ListDocumentsResponse( - documents=[], - next_page_token="def", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - ], - next_page_token="ghi", - ), - document_service.ListDocumentsResponse( - documents=[ - document.Document(), - document.Document(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - document_service.ListDocumentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" - } +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pager = client.list_documents(request=sample_request) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_document( + document_service.CreateDocumentRequest(), + parent="parent_value", + document=gcd_document.Document( + struct_data=struct_pb2.Struct( + fields={ + "key_value": struct_pb2.Value( + null_value=struct_pb2.NullValue.NULL_VALUE + ) + } + ) + ), + document_id="document_id_value", + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, document.Document) for i in results) - pages = list(client.list_documents(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_document_rest_error(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( "request_type", [ - document_service.CreateDocumentRequest, + document_service.UpdateDocumentRequest, dict, ], ) -def test_create_document_rest(request_type): +def test_update_document_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4623,12 +5464,14 @@ def test_create_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request_init["document"] = { "struct_data": {"fields": {}}, "json_data": "json_data_value", - "name": "name_value", + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", "id": "id_value", "schema_id": "schema_id_value", "content": { @@ -4639,13 +5482,28 @@ def test_create_document_rest(request_type): "parent_document_id": "parent_document_id_value", "derived_struct_data": {}, "index_time": {"seconds": 751, "nanos": 543}, + "index_status": { + "index_time": {}, + "error_samples": [ + { + 
"code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = document_service.CreateDocumentRequest.meta.fields["document"] + test_field = document_service.UpdateDocumentRequest.meta.fields["document"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -4729,7 +5587,7 @@ def get_message_fields(field): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) # Establish that the response is the type that we expect. 
assert isinstance(response, gcd_document.Document) @@ -4739,7 +5597,7 @@ def get_message_fields(field): assert response.parent_document_id == "parent_document_id_value" -def test_create_document_rest_use_cached_wrapped_rpc(): +def test_update_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4753,36 +5611,34 @@ def test_create_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_document in client._transport._wrapped_methods + assert client._transport.update_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc request = {} - client.create_document(request) + client.update_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_document(request) + client.update_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_document_rest_required_fields( - request_type=document_service.CreateDocumentRequest, +def test_update_document_rest_required_fields( + request_type=document_service.UpdateDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["document_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4790,32 +5646,27 @@ def test_create_document_rest_required_fields( ) # verify fields with default values are dropped - assert "documentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == request_init["document_id"] - - jsonified_request["parent"] = "parent_value" - jsonified_request["documentId"] = "document_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_document._get_unset_required_fields(jsonified_request) + ).update_document._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("document_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "documentId" in jsonified_request - assert jsonified_request["documentId"] == "document_id_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4836,7 +5687,7 @@ def test_create_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -4852,39 +5703,32 @@ def test_create_document_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.create_document(request) + response = client.update_document(request) - expected_params = [ - ( - "documentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_document_rest_unset_required_fields(): +def test_update_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_document._get_unset_required_fields({}) + unset_fields = transport.update_document._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("documentId",)) - & set( + set( ( - "parent", - "document", - "documentId", + "allowMissing", + "updateMask", ) ) + & set(("document",)) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_document_rest_interceptors(null_interceptor): +def 
test_update_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4897,14 +5741,14 @@ def test_create_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_create_document" + transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_create_document" + transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = document_service.CreateDocumentRequest.pb( - document_service.CreateDocumentRequest() + pb_message = document_service.UpdateDocumentRequest.pb( + document_service.UpdateDocumentRequest() ) transcode.return_value = { "method": "post", @@ -4920,7 +5764,7 @@ def test_create_document_rest_interceptors(null_interceptor): gcd_document.Document() ) - request = document_service.CreateDocumentRequest() + request = document_service.UpdateDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -4928,7 +5772,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = gcd_document.Document() - client.create_document( + client.update_document( request, metadata=[ ("key", "val"), @@ -4940,8 +5784,8 @@ def test_create_document_rest_interceptors(null_interceptor): post.assert_called_once() -def test_create_document_rest_bad_request( - transport: str = "rest", request_type=document_service.CreateDocumentRequest +def test_update_document_rest_bad_request( + transport: str = "rest", request_type=document_service.UpdateDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4950,7 +5794,9 @@ def test_create_document_rest_bad_request( # 
send a request that will satisfy transcoding request_init = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } request = request_type(**request_init) @@ -4963,10 +5809,10 @@ def test_create_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_document(request) + client.update_document(request) -def test_create_document_rest_flattened(): +def test_update_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4979,12 +5825,13 @@ def test_create_document_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + "document": { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -4994,7 +5841,7 @@ def test_create_document_rest_flattened(): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -5007,20 +5854,20 @@ def test_create_document_rest_flattened(): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - client.create_document(**mock_args) + client.update_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/documents" + "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_create_document_rest_flattened_error(transport: str = "rest"): +def test_update_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5029,9 +5876,8 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_document( - document_service.CreateDocumentRequest(), - parent="parent_value", + client.update_document( + document_service.UpdateDocumentRequest(), document=gcd_document.Document( struct_data=struct_pb2.Struct( fields={ @@ -5041,150 +5887,54 @@ def test_create_document_rest_flattened_error(transport: str = "rest"): } ) ), - document_id="document_id_value", + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_document_rest_error(): +def test_update_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - document_service.UpdateDocumentRequest, - dict, - ], -) -def test_update_document_rest(request_type): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - } - request_init["document"] = { - "struct_data": {"fields": {}}, - "json_data": "json_data_value", - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5", - "id": "id_value", - "schema_id": "schema_id_value", - "content": { - "raw_bytes": b"raw_bytes_blob", - "uri": "uri_value", - "mime_type": "mime_type_value", - }, - "parent_document_id": "parent_document_id_value", - "derived_struct_data": {}, - "index_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = document_service.UpdateDocumentRequest.meta.fields["document"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["document"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["document"][field])): - del request_init["document"][field][i][subfield] - else: - del 
request_init["document"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + document_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest(request_type): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document( - name="name_value", - id="id_value", - schema_id="schema_id_value", - parent_document_id="parent_document_id_value", - json_data="json_data_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcd_document.Document) - assert response.name == "name_value" - assert response.id == "id_value" - assert response.schema_id == "schema_id_value" - assert response.parent_document_id == "parent_document_id_value" + assert response is None -def test_update_document_rest_use_cached_wrapped_rpc(): +def test_delete_document_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5198,34 +5948,35 @@ def test_update_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_document in client._transport._wrapped_methods + assert client._transport.delete_document in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc request = {} - client.update_document(request) + client.delete_document(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_document(request) + client.delete_document(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_document_rest_required_fields( - request_type=document_service.UpdateDocumentRequest, +def test_delete_document_rest_required_fields( + request_type=document_service.DeleteDocumentRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5236,24 +5987,21 @@ def test_update_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_document._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "update_mask", - ) - ) + ).delete_document._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5262,7 +6010,7 @@ def test_update_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = gcd_document.Document() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5274,48 +6022,36 @@ def test_update_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.update_document(request) + response = client.delete_document(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_document_rest_unset_required_fields(): +def test_delete_document_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_document._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "updateMask", - ) - ) - & set(("document",)) - ) + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_document_rest_interceptors(null_interceptor): +def test_delete_document_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5328,14 +6064,11 @@ def 
test_update_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_update_document" - ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_update_document" + transports.DocumentServiceRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = document_service.UpdateDocumentRequest.pb( - document_service.UpdateDocumentRequest() + pb_message = document_service.DeleteDocumentRequest.pb( + document_service.DeleteDocumentRequest() ) transcode.return_value = { "method": "post", @@ -5347,19 +6080,15 @@ def test_update_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = gcd_document.Document.to_json( - gcd_document.Document() - ) - request = document_service.UpdateDocumentRequest() + request = document_service.DeleteDocumentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcd_document.Document() - client.update_document( + client.delete_document( request, metadata=[ ("key", "val"), @@ -5368,11 +6097,10 @@ def test_update_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() -def test_update_document_rest_bad_request( - transport: str = "rest", request_type=document_service.UpdateDocumentRequest +def test_delete_document_rest_bad_request( + transport: str = "rest", request_type=document_service.DeleteDocumentRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5381,9 +6109,7 @@ def test_update_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "document": { - "name": 
"projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } request = request_type(**request_init) @@ -5396,10 +6122,10 @@ def test_update_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_document(request) + client.delete_document(request) -def test_update_document_rest_flattened(): +def test_delete_document_rest_flattened(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5408,53 +6134,40 @@ def test_update_document_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcd_document.Document() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "document": { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } + "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" } # get truthy value for each flattened field mock_args = dict( - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcd_document.Document.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - client.update_document(**mock_args) + client.delete_document(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{document.name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" + "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" % client.transport._host, args[1], ) -def test_update_document_rest_flattened_error(transport: str = "rest"): +def test_delete_document_rest_flattened_error(transport: str = "rest"): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5463,22 +6176,13 @@ def test_update_document_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_document( - document_service.UpdateDocumentRequest(), - document=gcd_document.Document( - struct_data=struct_pb2.Struct( - fields={ - "key_value": struct_pb2.Value( - null_value=struct_pb2.NullValue.NULL_VALUE - ) - } - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_document( + document_service.DeleteDocumentRequest(), + name="name_value", ) -def test_update_document_rest_error(): +def test_delete_document_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5487,11 +6191,11 @@ def test_update_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - document_service.DeleteDocumentRequest, + import_config.ImportDocumentsRequest, dict, ], ) -def test_delete_document_rest(request_type): +def test_import_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5499,29 +6203,29 
@@ def test_delete_document_rest(request_type): # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) # Establish that the response is the type that we expect. - assert response is None + assert response.operation.name == "operations/spam" -def test_delete_document_rest_use_cached_wrapped_rpc(): +def test_import_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5535,35 +6239,41 @@ def test_delete_document_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_document in client._transport._wrapped_methods + assert client._transport.import_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + client._transport._wrapped_methods[ + client._transport.import_documents + ] = mock_rpc request = {} - client.delete_document(request) + client.import_documents(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_document(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_document_rest_required_fields( - request_type=document_service.DeleteDocumentRequest, +def test_import_documents_rest_required_fields( + request_type=import_config.ImportDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5574,21 +6284,21 @@ def test_delete_document_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_document._get_unset_required_fields(jsonified_request) + ).import_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert 
jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5597,7 +6307,7 @@ def test_delete_document_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5609,36 +6319,37 @@ def test_delete_document_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.delete_document(request) + response = client.import_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_document_rest_unset_required_fields(): +def test_import_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_document._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.import_documents._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_delete_document_rest_interceptors(null_interceptor): +def test_import_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5651,11 +6362,16 @@ def test_delete_document_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_delete_document" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents" + ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() - pb_message = document_service.DeleteDocumentRequest.pb( - document_service.DeleteDocumentRequest() + post.assert_not_called() + pb_message = import_config.ImportDocumentsRequest.pb( + import_config.ImportDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5667,15 +6383,19 @@ def test_delete_document_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) - request = document_service.DeleteDocumentRequest() + request = import_config.ImportDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - client.delete_document( + client.import_documents( request, metadata=[ ("key", "val"), @@ -5684,10 +6404,11 @@ def test_delete_document_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() -def test_delete_document_rest_bad_request( - transport: str = "rest", request_type=document_service.DeleteDocumentRequest +def test_import_documents_rest_bad_request( + transport: str = "rest", 
request_type=import_config.ImportDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5696,7 +6417,7 @@ def test_delete_document_rest_bad_request( # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" } request = request_type(**request_init) @@ -5709,67 +6430,10 @@ def test_delete_document_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_document(request) - - -def test_delete_document_rest_flattened(): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4/documents/sample5" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.delete_document(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_document_rest_flattened_error(transport: str = "rest"): - client = DocumentServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_document( - document_service.DeleteDocumentRequest(), - name="name_value", - ) + client.import_documents(request) -def test_delete_document_rest_error(): +def test_import_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -5778,11 +6442,11 @@ def test_delete_document_rest_error(): @pytest.mark.parametrize( "request_type", [ - import_config.ImportDocumentsRequest, + purge_config.PurgeDocumentsRequest, dict, ], ) -def test_import_documents_rest(request_type): +def test_purge_documents_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5806,13 +6470,13 @@ def test_import_documents_rest(request_type): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_import_documents_rest_use_cached_wrapped_rpc(): +def test_purge_documents_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5826,19 +6490,17 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.import_documents in client._transport._wrapped_methods + assert client._transport.purge_documents in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.import_documents - ] = mock_rpc + client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc request = {} - client.import_documents(request) + client.purge_documents(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -5847,20 +6509,21 @@ def test_import_documents_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.import_documents(request) + client.purge_documents(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_import_documents_rest_required_fields( - request_type=import_config.ImportDocumentsRequest, +def test_purge_documents_rest_required_fields( + request_type=purge_config.PurgeDocumentsRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5871,21 +6534,24 @@ def test_import_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).import_documents._get_unset_required_fields(jsonified_request) + ).purge_documents._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "filter" in jsonified_request + assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5919,24 +6585,32 @@ def test_import_documents_rest_required_fields( response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value - response = client.import_documents(request) + response = client.purge_documents(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_import_documents_rest_unset_required_fields(): +def test_purge_documents_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.import_documents._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + unset_fields = transport.purge_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "filter", + ) + ) + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_documents_rest_interceptors(null_interceptor): +def test_purge_documents_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5951,14 +6625,14 @@ def test_import_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_import_documents" + transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_import_documents" + transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = import_config.ImportDocumentsRequest.pb( - import_config.ImportDocumentsRequest() + pb_message = purge_config.PurgeDocumentsRequest.pb( + purge_config.PurgeDocumentsRequest() ) transcode.return_value = { "method": "post", @@ -5974,7 +6648,7 @@ def 
test_import_documents_rest_interceptors(null_interceptor): operations_pb2.Operation() ) - request = import_config.ImportDocumentsRequest() + request = purge_config.PurgeDocumentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5982,7 +6656,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.import_documents( + client.purge_documents( request, metadata=[ ("key", "val"), @@ -5994,8 +6668,8 @@ def test_import_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_import_documents_rest_bad_request( - transport: str = "rest", request_type=import_config.ImportDocumentsRequest +def test_purge_documents_rest_bad_request( + transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6017,10 +6691,10 @@ def test_import_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.import_documents(request) + client.purge_documents(request) -def test_import_documents_rest_error(): +def test_purge_documents_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6029,11 +6703,11 @@ def test_import_documents_rest_error(): @pytest.mark.parametrize( "request_type", [ - purge_config.PurgeDocumentsRequest, + document_service.BatchGetDocumentsMetadataRequest, dict, ], ) -def test_purge_documents_rest(request_type): +def test_batch_get_documents_metadata_rest(request_type): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6048,22 +6722,26 @@ def test_purge_documents_rest(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, document_service.BatchGetDocumentsMetadataResponse) -def test_purge_documents_rest_use_cached_wrapped_rpc(): +def test_batch_get_documents_metadata_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6077,40 +6755,40 @@ def test_purge_documents_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.purge_documents in client._transport._wrapped_methods + assert ( + client._transport.batch_get_documents_metadata + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.purge_documents] = mock_rpc + client._transport._wrapped_methods[ + client._transport.batch_get_documents_metadata + ] = mock_rpc request = {} - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.purge_documents(request) + client.batch_get_documents_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_purge_documents_rest_required_fields( - request_type=purge_config.PurgeDocumentsRequest, +def test_batch_get_documents_metadata_rest_required_fields( + request_type=document_service.BatchGetDocumentsMetadataRequest, ): transport_class = transports.DocumentServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["filter"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -6121,24 +6799,23 @@ def test_purge_documents_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" - jsonified_request["filter"] = "filter_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).purge_documents._get_unset_required_fields(jsonified_request) + ).batch_get_documents_metadata._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("matcher",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "filter" in jsonified_request - assert jsonified_request["filter"] == "filter_value" client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6147,7 +6824,7 @@ def test_purge_documents_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = document_service.BatchGetDocumentsMetadataResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6159,45 +6836,49 @@ def test_purge_documents_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.purge_documents(request) + response = client.batch_get_documents_metadata(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_purge_documents_rest_unset_required_fields(): +def test_batch_get_documents_metadata_rest_unset_required_fields(): transport = transports.DocumentServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.purge_documents._get_unset_required_fields({}) + unset_fields = transport.batch_get_documents_metadata._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("matcher",)) & set( ( "parent", - "filter", + "matcher", ) ) ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_purge_documents_rest_interceptors(null_interceptor): +def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): transport = transports.DocumentServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6210,16 +6891,14 @@ def test_purge_documents_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.DocumentServiceRestInterceptor, "post_purge_documents" + transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( - transports.DocumentServiceRestInterceptor, "pre_purge_documents" + transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = purge_config.PurgeDocumentsRequest.pb( - purge_config.PurgeDocumentsRequest() + pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( + document_service.BatchGetDocumentsMetadataRequest() ) transcode.return_value = { "method": "post", @@ -6231,19 +6910,21 @@ def test_purge_documents_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson( - operations_pb2.Operation() + req.return_value._content = ( + document_service.BatchGetDocumentsMetadataResponse.to_json( + document_service.BatchGetDocumentsMetadataResponse() + ) ) - request = 
purge_config.PurgeDocumentsRequest() + request = document_service.BatchGetDocumentsMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = document_service.BatchGetDocumentsMetadataResponse() - client.purge_documents( + client.batch_get_documents_metadata( request, metadata=[ ("key", "val"), @@ -6255,8 +6936,9 @@ def test_purge_documents_rest_interceptors(null_interceptor): post.assert_called_once() -def test_purge_documents_rest_bad_request( - transport: str = "rest", request_type=purge_config.PurgeDocumentsRequest +def test_batch_get_documents_metadata_rest_bad_request( + transport: str = "rest", + request_type=document_service.BatchGetDocumentsMetadataRequest, ): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6278,10 +6960,71 @@ def test_purge_documents_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.purge_documents(request) + client.batch_get_documents_metadata(request) -def test_purge_documents_rest_error(): +def test_batch_get_documents_metadata_rest_flattened(): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = document_service.BatchGetDocumentsMetadataResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/dataStores/sample3/branches/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = document_service.BatchGetDocumentsMetadataResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.batch_get_documents_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/dataStores/*/branches/*}/batchGetDocumentsMetadata" + % client.transport._host, + args[1], + ) + + +def test_batch_get_documents_metadata_rest_flattened_error(transport: str = "rest"): + client = DocumentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_get_documents_metadata( + document_service.BatchGetDocumentsMetadataRequest(), + parent="parent_value", + ) + + +def test_batch_get_documents_metadata_rest_error(): client = DocumentServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -6433,6 +7176,7 @@ def test_document_service_base_transport(): "delete_document", "import_documents", "purge_documents", + "batch_get_documents_metadata", "get_operation", "cancel_operation", "list_operations", @@ -6737,6 +7481,9 @@ def test_document_service_client_transport_session_collision(transport_name): session1 = client1.transport.purge_documents._session session2 = client2.transport.purge_documents._session assert session1 != session2 + session1 = client1.transport.batch_get_documents_metadata._session + session2 = client2.transport.batch_get_documents_metadata._session + assert session1 != session2 def test_document_service_grpc_transport_channel(): diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py index 0fd27c82c7e8..e22d9e9f080e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py @@ -3895,6 +3895,7 @@ def test_create_evaluation_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, @@ -3934,6 +3935,7 @@ def test_create_evaluation_rest(request_type): "query_id": "query_id_value", "search_result_persistence_count": 3328, }, + "relevance_threshold": 1, }, 
"query_set_spec": {"sample_query_set": "sample_query_set_value"}, }, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 33f10ee85214..1eddb9e8a3c5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -2761,6 +2761,7 @@ def test_update_serving_config_rest(request_type): "include_citations": True, "ignore_adversarial_query": True, "ignore_non_summary_seeking_query": True, + "ignore_low_relevant_content": True, "model_prompt_spec": {"preamble": "preamble_value"}, "language_code": "language_code_value", "model_spec": {"version": "version_value"}, diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index fe2c1010670e..b04e0007a9c2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -2455,6 +2455,7 @@ def test_write_user_event_rest(request_type): "uri": "uri_value", "quantity": 895, "promotion_ids": ["promotion_ids_value1", "promotion_ids_value2"], + "joined": True, } ], "panel": {